Moving MediaStreamSignaling logic into PeerConnection.

This needs to happen because in the future, m-lines will be offered
based on the set of RtpSenders/RtpReceivers, rather than the set of
tracks that MediaStreamSignaling knows about.

Besides that, MediaStreamSignaling was a "glue class" without
a clearly defined role, so removing it is a good thing for
other reasons as well.
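
For reference: the sid allocation and recycling that used to live in
MediaStreamSignaling (AllocateSctpSid/RemoveSctpDataChannel) is now exposed
through the new SctpSidAllocator helper and the DataChannel::SignalClosed
signal added in datachannel.h below. A minimal sketch of how a
PeerConnection-side owner could wire these together; the owner class and its
members here are illustrative only and are not part of this CL:

  #include <vector>

  #include "talk/app/webrtc/datachannel.h"
  #include "webrtc/base/logging.h"
  #include "webrtc/base/scoped_ref_ptr.h"
  #include "webrtc/base/sigslot.h"

  class DataChannelOwnerSketch : public sigslot::has_slots<> {
   public:
    // Once the DTLS role is known, give every channel without an id a sid:
    // even ids for SSL_CLIENT, odd ids for SSL_SERVER.
    void AssignSids(rtc::SSLRole role) {
      for (const auto& channel : sctp_data_channels_) {
        if (channel->id() < 0) {
          int sid;
          if (!sid_allocator_.AllocateSid(role, &sid)) {
            LOG(LS_ERROR) << "Failed to allocate SCTP sid.";
            continue;
          }
          channel->SetSctpSid(sid);
        }
      }
    }

    // Connected to DataChannel::SignalClosed; frees the sid for reuse.
    void OnDataChannelClosed(webrtc::DataChannel* channel) {
      if (channel->id() >= 0) {
        sid_allocator_.ReleaseSid(channel->id());
      }
    }

   private:
    webrtc::SctpSidAllocator sid_allocator_;
    std::vector<rtc::scoped_refptr<webrtc::DataChannel>> sctp_data_channels_;
  };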

Review URL: https://codereview.webrtc.org/1393563002

Cr-Commit-Position: refs/heads/master@{#10268}
diff --git a/talk/app/webrtc/datachannel.cc b/talk/app/webrtc/datachannel.cc
index 2028dc9..20cf743 100644
--- a/talk/app/webrtc/datachannel.cc
+++ b/talk/app/webrtc/datachannel.cc
@@ -31,6 +31,7 @@
 
 #include "talk/app/webrtc/mediastreamprovider.h"
 #include "talk/app/webrtc/sctputils.h"
+#include "talk/media/sctp/sctpdataengine.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/refcount.h"
 
@@ -43,6 +44,42 @@
   MSG_CHANNELREADY,
 };
 
+bool SctpSidAllocator::AllocateSid(rtc::SSLRole role, int* sid) {
+  int potential_sid = (role == rtc::SSL_CLIENT) ? 0 : 1;
+  while (!IsSidAvailable(potential_sid)) {
+    potential_sid += 2;
+    if (potential_sid > static_cast<int>(cricket::kMaxSctpSid)) {
+      return false;
+    }
+  }
+
+  *sid = potential_sid;
+  used_sids_.insert(potential_sid);
+  return true;
+}
+
+bool SctpSidAllocator::ReserveSid(int sid) {
+  if (!IsSidAvailable(sid)) {
+    return false;
+  }
+  used_sids_.insert(sid);
+  return true;
+}
+
+void SctpSidAllocator::ReleaseSid(int sid) {
+  auto it = used_sids_.find(sid);
+  if (it != used_sids_.end()) {
+    used_sids_.erase(it);
+  }
+}
+
+bool SctpSidAllocator::IsSidAvailable(int sid) const {
+  if (sid < 0 || sid > static_cast<int>(cricket::kMaxSctpSid)) {
+    return false;
+  }
+  return used_sids_.find(sid) == used_sids_.end();
+}
+
 DataChannel::PacketQueue::PacketQueue() : byte_count_(0) {}
 
 DataChannel::PacketQueue::~PacketQueue() {
@@ -257,8 +294,9 @@
 
 void DataChannel::SetSctpSid(int sid) {
   ASSERT(config_.id < 0 && sid >= 0 && data_channel_type_ == cricket::DCT_SCTP);
-  if (config_.id == sid)
+  if (config_.id == sid) {
     return;
+  }
 
   config_.id = sid;
   provider_->AddSctpDataStream(sid);
@@ -276,6 +314,13 @@
   }
 }
 
+// The underlying transport channel was destroyed.
+// This function makes sure the DataChannel is disconnected and changes state to
+// kClosed.
+void DataChannel::OnTransportChannelDestroyed() {
+  DoClose();
+}
+
 void DataChannel::SetSendSsrc(uint32_t send_ssrc) {
   ASSERT(data_channel_type_ == cricket::DCT_RTP);
   if (send_ssrc_set_) {
@@ -294,13 +339,6 @@
   }
 }
 
-// The underlaying data engine is closing.
-// This function makes sure the DataChannel is disconnected and changes state to
-// kClosed.
-void DataChannel::OnDataEngineClose() {
-  DoClose();
-}
-
 void DataChannel::OnDataReceived(cricket::DataChannel* channel,
                                  const cricket::ReceiveDataParams& params,
                                  const rtc::Buffer& payload) {
@@ -361,6 +399,12 @@
   }
 }
 
+void DataChannel::OnStreamClosedRemotely(uint32_t sid) {
+  if (data_channel_type_ == cricket::DCT_SCTP && sid == config_.id) {
+    Close();
+  }
+}
+
 void DataChannel::OnChannelReady(bool writable) {
   writable_ = writable;
   if (!writable) {
@@ -436,13 +480,17 @@
 }
 
 void DataChannel::SetState(DataState state) {
-  if (state_ == state)
+  if (state_ == state) {
     return;
+  }
 
   state_ = state;
   if (observer_) {
     observer_->OnStateChange();
   }
+  if (state_ == kClosed) {
+    SignalClosed(this);
+  }
 }
 
 void DataChannel::DisconnectFromProvider() {
diff --git a/talk/app/webrtc/datachannel.h b/talk/app/webrtc/datachannel.h
index 4506f71..2713ae3 100644
--- a/talk/app/webrtc/datachannel.h
+++ b/talk/app/webrtc/datachannel.h
@@ -29,6 +29,7 @@
 #define TALK_APP_WEBRTC_DATACHANNEL_H_
 
 #include <deque>
+#include <set>
 #include <string>
 
 #include "talk/app/webrtc/datachannelinterface.h"
@@ -83,6 +84,28 @@
   OpenHandshakeRole open_handshake_role;
 };
 
+// Helper class to allocate unique IDs for SCTP DataChannels.
+class SctpSidAllocator {
+ public:
+  // Gets the first unused odd/even id based on the DTLS role. If |role| is
+  // SSL_CLIENT, the allocated id starts from 0 and takes even numbers;
+  // otherwise, the id starts from 1 and takes odd numbers.
+  // Returns false if no id can be allocated.
+  bool AllocateSid(rtc::SSLRole role, int* sid);
+
+  // Attempts to reserve a specific sid. Returns false if it's unavailable.
+  bool ReserveSid(int sid);
+
+  // Indicates that |sid| isn't in use any more, and is thus available again.
+  void ReleaseSid(int sid);
+
+ private:
+  // Checks if |sid| is available to be assigned to a new SCTP data channel.
+  bool IsSidAvailable(int sid) const;
+
+  std::set<int> used_sids_;
+};
+
 // DataChannel is a an implementation of the DataChannelInterface based on
 // libjingle's data engine. It provides an implementation of unreliable or
 // reliabledata channels. Currently this class is specifically designed to use
@@ -129,9 +152,6 @@
   // rtc::MessageHandler override.
   virtual void OnMessage(rtc::Message* msg);
 
-  // Called if the underlying data engine is closing.
-  void OnDataEngineClose();
-
   // Called when the channel's ready to use.  That can happen when the
   // underlying DataMediaChannel becomes ready, or when this channel is a new
   // stream on an existing DataMediaChannel, and we've finished negotiation.
@@ -141,6 +161,7 @@
   void OnDataReceived(cricket::DataChannel* channel,
                       const cricket::ReceiveDataParams& params,
                       const rtc::Buffer& payload);
+  void OnStreamClosedRemotely(uint32_t sid);
 
   // The remote peer request that this channel should be closed.
   void RemotePeerRequestClose();
@@ -151,7 +172,10 @@
   // be called once.
   void SetSctpSid(int sid);
   // Called when the transport channel is created.
+  // Only needs to be called for SCTP data channels.
   void OnTransportChannelCreated();
+  // Called when the transport channel is destroyed.
+  void OnTransportChannelDestroyed();
 
   // The following methods are for RTP only.
 
@@ -167,6 +191,11 @@
     return data_channel_type_;
   }
 
+  // Emitted when state transitions to kClosed.
+  // In the case of SCTP channels, this signal can be used to tell when the
+  // channel's sid is free.
+  sigslot::signal1<DataChannel*> SignalClosed;
+
  protected:
   DataChannel(DataChannelProviderInterface* client,
               cricket::DataChannelType dct,
@@ -247,16 +276,6 @@
   PacketQueue queued_send_data_;
 };
 
-class DataChannelFactory {
- public:
-  virtual rtc::scoped_refptr<DataChannel> CreateDataChannel(
-      const std::string& label,
-      const InternalDataChannelInit* config) = 0;
-
- protected:
-  virtual ~DataChannelFactory() {}
-};
-
 // Define proxy for DataChannelInterface.
 BEGIN_PROXY_MAP(DataChannel)
   PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
diff --git a/talk/app/webrtc/datachannel_unittest.cc b/talk/app/webrtc/datachannel_unittest.cc
index b4f611e..ff79541 100644
--- a/talk/app/webrtc/datachannel_unittest.cc
+++ b/talk/app/webrtc/datachannel_unittest.cc
@@ -31,6 +31,7 @@
 #include "webrtc/base/gunit.h"
 
 using webrtc::DataChannel;
+using webrtc::SctpSidAllocator;
 
 class FakeDataChannelObserver : public webrtc::DataChannelObserver {
  public:
@@ -506,3 +507,75 @@
   webrtc_data_channel_->OnTransportChannelCreated();
   webrtc_data_channel_->Close();
 }
+
+class SctpSidAllocatorTest : public testing::Test {
+ protected:
+  SctpSidAllocator allocator_;
+};
+
+// Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for
+// SSL_SERVER.
+TEST_F(SctpSidAllocatorTest, SctpIdAllocationBasedOnRole) {
+  int id;
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &id));
+  EXPECT_EQ(1, id);
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &id));
+  EXPECT_EQ(0, id);
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &id));
+  EXPECT_EQ(3, id);
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &id));
+  EXPECT_EQ(2, id);
+}
+
+// Verifies that SCTP ids of existing DataChannels are not reused.
+TEST_F(SctpSidAllocatorTest, SctpIdAllocationNoReuse) {
+  int old_id = 1;
+  EXPECT_TRUE(allocator_.ReserveSid(old_id));
+
+  int new_id;
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &new_id));
+  EXPECT_NE(old_id, new_id);
+
+  old_id = 0;
+  EXPECT_TRUE(allocator_.ReserveSid(old_id));
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &new_id));
+  EXPECT_NE(old_id, new_id);
+}
+
+// Verifies that SCTP ids of removed DataChannels can be reused.
+TEST_F(SctpSidAllocatorTest, SctpIdReusedForRemovedDataChannel) {
+  int odd_id = 1;
+  int even_id = 0;
+  EXPECT_TRUE(allocator_.ReserveSid(odd_id));
+  EXPECT_TRUE(allocator_.ReserveSid(even_id));
+
+  int allocated_id = -1;
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+  EXPECT_EQ(odd_id + 2, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+  EXPECT_EQ(even_id + 2, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+  EXPECT_EQ(odd_id + 4, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+  EXPECT_EQ(even_id + 4, allocated_id);
+
+  allocator_.ReleaseSid(odd_id);
+  allocator_.ReleaseSid(even_id);
+
+  // Verifies that removed ids are reused.
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+  EXPECT_EQ(odd_id, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+  EXPECT_EQ(even_id, allocated_id);
+
+  // Verifies that higher ids that are still in use are not reallocated.
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_SERVER, &allocated_id));
+  EXPECT_EQ(odd_id + 6, allocated_id);
+
+  EXPECT_TRUE(allocator_.AllocateSid(rtc::SSL_CLIENT, &allocated_id));
+  EXPECT_EQ(even_id + 6, allocated_id);
+}
diff --git a/talk/app/webrtc/mediastreamsignaling.cc b/talk/app/webrtc/mediastreamsignaling.cc
index c12471c..b405273 100644
--- a/talk/app/webrtc/mediastreamsignaling.cc
+++ b/talk/app/webrtc/mediastreamsignaling.cc
@@ -27,1000 +27,4 @@
 
 #include "talk/app/webrtc/mediastreamsignaling.h"
 
-#include <vector>
-
-#include "talk/app/webrtc/audiotrack.h"
-#include "talk/app/webrtc/mediaconstraintsinterface.h"
-#include "talk/app/webrtc/mediastreamproxy.h"
-#include "talk/app/webrtc/mediastreamtrackproxy.h"
-#include "talk/app/webrtc/remoteaudiosource.h"
-#include "talk/app/webrtc/remotevideocapturer.h"
-#include "talk/app/webrtc/sctputils.h"
-#include "talk/app/webrtc/videosource.h"
-#include "talk/app/webrtc/videotrack.h"
-#include "talk/media/sctp/sctpdataengine.h"
-#include "webrtc/base/bytebuffer.h"
-#include "webrtc/base/stringutils.h"
-
-static const char kDefaultStreamLabel[] = "default";
-static const char kDefaultAudioTrackLabel[] = "defaulta0";
-static const char kDefaultVideoTrackLabel[] = "defaultv0";
-
-namespace webrtc {
-
-using rtc::scoped_ptr;
-using rtc::scoped_refptr;
-
-static bool ParseConstraintsForAnswer(
-    const MediaConstraintsInterface* constraints,
-    cricket::MediaSessionOptions* options) {
-  bool value = false;
-  size_t mandatory_constraints_satisfied = 0;
-
-  // kOfferToReceiveAudio defaults to true according to spec.
-  if (!FindConstraint(constraints,
-                      MediaConstraintsInterface::kOfferToReceiveAudio,
-                      &value, &mandatory_constraints_satisfied) || value) {
-    options->recv_audio = true;
-  }
-
-  // kOfferToReceiveVideo defaults to false according to spec. But
-  // if it is an answer and video is offered, we should still accept video
-  // per default.
-  value = false;
-  if (!FindConstraint(constraints,
-                      MediaConstraintsInterface::kOfferToReceiveVideo,
-                      &value, &mandatory_constraints_satisfied) || value) {
-    options->recv_video = true;
-  }
-
-  if (FindConstraint(constraints,
-                     MediaConstraintsInterface::kVoiceActivityDetection,
-                     &value, &mandatory_constraints_satisfied)) {
-    options->vad_enabled = value;
-  }
-
-  if (FindConstraint(constraints,
-                     MediaConstraintsInterface::kUseRtpMux,
-                     &value, &mandatory_constraints_satisfied)) {
-    options->bundle_enabled = value;
-  } else {
-    // kUseRtpMux defaults to true according to spec.
-    options->bundle_enabled = true;
-  }
-  if (FindConstraint(constraints,
-                     MediaConstraintsInterface::kIceRestart,
-                     &value, &mandatory_constraints_satisfied)) {
-    options->transport_options.ice_restart = value;
-  } else {
-    // kIceRestart defaults to false according to spec.
-    options->transport_options.ice_restart = false;
-  }
-
-  if (!constraints) {
-    return true;
-  }
-  return mandatory_constraints_satisfied == constraints->GetMandatory().size();
-}
-
-// Returns true if if at least one media content is present and
-// |options.bundle_enabled| is true.
-// Bundle will be enabled  by default if at least one media content is present
-// and the constraint kUseRtpMux has not disabled bundle.
-static bool EvaluateNeedForBundle(const cricket::MediaSessionOptions& options) {
-  return options.bundle_enabled &&
-      (options.has_audio() || options.has_video() || options.has_data());
-}
-
-static bool MediaContentDirectionHasSend(cricket::MediaContentDirection dir) {
-  return dir == cricket::MD_SENDONLY || dir == cricket::MD_SENDRECV;
-}
-
-static bool IsValidOfferToReceiveMedia(int value) {
-  typedef PeerConnectionInterface::RTCOfferAnswerOptions Options;
-  return (value >= Options::kUndefined) &&
-      (value <= Options::kMaxOfferToReceiveMedia);
-}
-
-// Add the stream and RTP data channel info to |session_options|.
-static void SetStreams(
-    cricket::MediaSessionOptions* session_options,
-    rtc::scoped_refptr<StreamCollection> streams,
-    const MediaStreamSignaling::RtpDataChannels& rtp_data_channels) {
-  session_options->streams.clear();
-  if (streams != NULL) {
-    for (size_t i = 0; i < streams->count(); ++i) {
-      MediaStreamInterface* stream = streams->at(i);
-
-      AudioTrackVector audio_tracks(stream->GetAudioTracks());
-
-      // For each audio track in the stream, add it to the MediaSessionOptions.
-      for (size_t j = 0; j < audio_tracks.size(); ++j) {
-        scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
-        session_options->AddSendStream(
-            cricket::MEDIA_TYPE_AUDIO, track->id(), stream->label());
-      }
-
-      VideoTrackVector video_tracks(stream->GetVideoTracks());
-
-      // For each video track in the stream, add it to the MediaSessionOptions.
-      for (size_t j = 0; j < video_tracks.size(); ++j) {
-        scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
-        session_options->AddSendStream(
-            cricket::MEDIA_TYPE_VIDEO, track->id(), stream->label());
-      }
-    }
-  }
-
-  // Check for data channels.
-  MediaStreamSignaling::RtpDataChannels::const_iterator data_channel_it =
-      rtp_data_channels.begin();
-  for (; data_channel_it != rtp_data_channels.end(); ++data_channel_it) {
-    const DataChannel* channel = data_channel_it->second;
-    if (channel->state() == DataChannel::kConnecting ||
-        channel->state() == DataChannel::kOpen) {
-      // |streamid| and |sync_label| are both set to the DataChannel label
-      // here so they can be signaled the same way as MediaStreams and Tracks.
-      // For MediaStreams, the sync_label is the MediaStream label and the
-      // track label is the same as |streamid|.
-      const std::string& streamid = channel->label();
-      const std::string& sync_label = channel->label();
-      session_options->AddSendStream(
-          cricket::MEDIA_TYPE_DATA, streamid, sync_label);
-    }
-  }
-}
-
-// Factory class for creating remote MediaStreams and MediaStreamTracks.
-class RemoteMediaStreamFactory {
- public:
-  explicit RemoteMediaStreamFactory(rtc::Thread* signaling_thread,
-                                    cricket::ChannelManager* channel_manager)
-      : signaling_thread_(signaling_thread),
-        channel_manager_(channel_manager) {
-  }
-
-  rtc::scoped_refptr<MediaStreamInterface> CreateMediaStream(
-      const std::string& stream_label) {
-    return MediaStreamProxy::Create(
-        signaling_thread_, MediaStream::Create(stream_label));
-  }
-
-  AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
-                                     const std::string& track_id) {
-    return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
-        stream, track_id, RemoteAudioSource::Create().get());
-  }
-
-  VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
-                                     const std::string& track_id) {
-    return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(
-        stream, track_id, VideoSource::Create(channel_manager_,
-                                              new RemoteVideoCapturer(),
-                                              NULL).get());
-  }
-
- private:
-  template <typename TI, typename T, typename TP, typename S>
-  TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id,
-               S* source) {
-    rtc::scoped_refptr<TI> track(
-        TP::Create(signaling_thread_, T::Create(track_id, source)));
-    track->set_state(webrtc::MediaStreamTrackInterface::kLive);
-    if (stream->AddTrack(track)) {
-      return track;
-    }
-    return NULL;
-  }
-
-  rtc::Thread* signaling_thread_;
-  cricket::ChannelManager* channel_manager_;
-};
-
-MediaStreamSignaling::MediaStreamSignaling(
-    rtc::Thread* signaling_thread,
-    MediaStreamSignalingObserver* stream_observer,
-    cricket::ChannelManager* channel_manager)
-    : signaling_thread_(signaling_thread),
-      data_channel_factory_(NULL),
-      stream_observer_(stream_observer),
-      local_streams_(StreamCollection::Create()),
-      remote_streams_(StreamCollection::Create()),
-      remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread,
-                                                          channel_manager)),
-      last_allocated_sctp_even_sid_(-2),
-      last_allocated_sctp_odd_sid_(-1) {
-}
-
-MediaStreamSignaling::~MediaStreamSignaling() {
-}
-
-void MediaStreamSignaling::TearDown() {
-  OnAudioChannelClose();
-  OnVideoChannelClose();
-  OnDataChannelClose();
-}
-
-bool MediaStreamSignaling::IsSctpSidAvailable(int sid) const {
-  if (sid < 0 || sid > static_cast<int>(cricket::kMaxSctpSid))
-    return false;
-
-  return FindDataChannelBySid(sid) < 0;
-}
-
-// Gets the first unused odd/even id based on the DTLS role. If |role| is
-// SSL_CLIENT, the allocated id starts from 0 and takes even numbers; otherwise,
-// the id starts from 1 and takes odd numbers. Returns false if no id can be
-// allocated.
-bool MediaStreamSignaling::AllocateSctpSid(rtc::SSLRole role, int* sid) {
-  int& last_id = (role == rtc::SSL_CLIENT) ?
-      last_allocated_sctp_even_sid_ : last_allocated_sctp_odd_sid_;
-
-  do {
-    last_id += 2;
-  } while (last_id <= static_cast<int>(cricket::kMaxSctpSid) &&
-           !IsSctpSidAvailable(last_id));
-
-  if (last_id > static_cast<int>(cricket::kMaxSctpSid)) {
-    return false;
-  }
-
-  *sid = last_id;
-  return true;
-}
-
-bool MediaStreamSignaling::HasDataChannels() const {
-  return !rtp_data_channels_.empty() || !sctp_data_channels_.empty();
-}
-
-bool MediaStreamSignaling::AddDataChannel(DataChannel* data_channel) {
-  ASSERT(data_channel != NULL);
-  if (data_channel->data_channel_type() == cricket::DCT_RTP) {
-    if (rtp_data_channels_.find(data_channel->label()) !=
-        rtp_data_channels_.end()) {
-      LOG(LS_ERROR) << "DataChannel with label " << data_channel->label()
-                    << " already exists.";
-      return false;
-    }
-    rtp_data_channels_[data_channel->label()] = data_channel;
-  } else {
-    ASSERT(data_channel->data_channel_type() == cricket::DCT_SCTP);
-    sctp_data_channels_.push_back(data_channel);
-  }
-  return true;
-}
-
-bool MediaStreamSignaling::AddDataChannelFromOpenMessage(
-    const cricket::ReceiveDataParams& params,
-    const rtc::Buffer& payload) {
-  if (!data_channel_factory_) {
-    LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
-                    << "are not supported.";
-    return false;
-  }
-
-  std::string label;
-  InternalDataChannelInit config;
-  config.id = params.ssrc;
-  if (!ParseDataChannelOpenMessage(payload, &label, &config)) {
-    LOG(LS_WARNING) << "Failed to parse the OPEN message for sid "
-                    << params.ssrc;
-    return false;
-  }
-  config.open_handshake_role = InternalDataChannelInit::kAcker;
-
-  scoped_refptr<DataChannel> channel(
-      data_channel_factory_->CreateDataChannel(label, &config));
-  if (!channel.get()) {
-    LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message.";
-    return false;
-  }
-
-  stream_observer_->OnAddDataChannel(channel);
-  return true;
-}
-
-void MediaStreamSignaling::RemoveSctpDataChannel(int sid) {
-  ASSERT(sid >= 0);
-  for (SctpDataChannels::iterator iter = sctp_data_channels_.begin();
-       iter != sctp_data_channels_.end();
-       ++iter) {
-    if ((*iter)->id() == sid) {
-      sctp_data_channels_.erase(iter);
-
-      if (rtc::IsEven(sid) && sid <= last_allocated_sctp_even_sid_) {
-        last_allocated_sctp_even_sid_ = sid - 2;
-      } else if (rtc::IsOdd(sid) && sid <= last_allocated_sctp_odd_sid_) {
-        last_allocated_sctp_odd_sid_ = sid - 2;
-      }
-      return;
-    }
-  }
-}
-
-bool MediaStreamSignaling::AddLocalStream(MediaStreamInterface* local_stream) {
-  if (local_streams_->find(local_stream->label()) != NULL) {
-    LOG(LS_WARNING) << "MediaStream with label " << local_stream->label()
-                    << "already exist.";
-    return false;
-  }
-  local_streams_->AddStream(local_stream);
-
-  // Find tracks that has already been configured in SDP. This can occur if a
-  // local session description that contains the MSID of these tracks is set
-  // before AddLocalStream is called. It can also occur if the local session
-  // description is not changed and RemoveLocalStream
-  // is called and later AddLocalStream is called again with the same stream.
-  AudioTrackVector audio_tracks = local_stream->GetAudioTracks();
-  for (AudioTrackVector::const_iterator it = audio_tracks.begin();
-       it != audio_tracks.end(); ++it) {
-    const TrackInfo* track_info = FindTrackInfo(local_audio_tracks_,
-                                                local_stream->label(),
-                                                (*it)->id());
-    if (track_info) {
-      OnLocalTrackSeen(track_info->stream_label, track_info->track_id,
-                       track_info->ssrc, cricket::MEDIA_TYPE_AUDIO);
-    }
-  }
-
-  VideoTrackVector video_tracks = local_stream->GetVideoTracks();
-  for (VideoTrackVector::const_iterator it = video_tracks.begin();
-       it != video_tracks.end(); ++it) {
-    const TrackInfo* track_info = FindTrackInfo(local_video_tracks_,
-                                                local_stream->label(),
-                                                (*it)->id());
-    if (track_info) {
-      OnLocalTrackSeen(track_info->stream_label, track_info->track_id,
-                       track_info->ssrc, cricket::MEDIA_TYPE_VIDEO);
-    }
-  }
-  return true;
-}
-
-void MediaStreamSignaling::RemoveLocalStream(
-    MediaStreamInterface* local_stream) {
-  AudioTrackVector audio_tracks = local_stream->GetAudioTracks();
-  for (AudioTrackVector::const_iterator it = audio_tracks.begin();
-       it != audio_tracks.end(); ++it) {
-    const TrackInfo* track_info = FindTrackInfo(local_audio_tracks_,
-                                                local_stream->label(),
-                                                (*it)->id());
-    if (track_info) {
-      stream_observer_->OnRemoveLocalAudioTrack(local_stream, *it,
-                                                track_info->ssrc);
-    }
-  }
-  VideoTrackVector video_tracks = local_stream->GetVideoTracks();
-  for (VideoTrackVector::const_iterator it = video_tracks.begin();
-       it != video_tracks.end(); ++it) {
-    const TrackInfo* track_info = FindTrackInfo(local_video_tracks_,
-                                                local_stream->label(),
-                                                (*it)->id());
-    if (track_info) {
-      stream_observer_->OnRemoveLocalVideoTrack(local_stream, *it);
-    }
-  }
-
-  local_streams_->RemoveStream(local_stream);
-  stream_observer_->OnRemoveLocalStream(local_stream);
-}
-
-bool MediaStreamSignaling::GetOptionsForOffer(
-    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
-    cricket::MediaSessionOptions* session_options) {
-  typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
-  if (!IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) ||
-      !IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video)) {
-    return false;
-  }
-
-  SetStreams(session_options, local_streams_, rtp_data_channels_);
-
-  // According to the spec, offer to receive audio/video if the constraint is
-  // not set and there are send streams.
-  if (rtc_options.offer_to_receive_audio == RTCOfferAnswerOptions::kUndefined) {
-    session_options->recv_audio =
-        session_options->HasSendMediaStream(cricket::MEDIA_TYPE_AUDIO);
-  } else {
-    session_options->recv_audio = (rtc_options.offer_to_receive_audio > 0);
-  }
-  if (rtc_options.offer_to_receive_video == RTCOfferAnswerOptions::kUndefined) {
-    session_options->recv_video =
-        session_options->HasSendMediaStream(cricket::MEDIA_TYPE_VIDEO);
-  } else {
-    session_options->recv_video = (rtc_options.offer_to_receive_video > 0);
-  }
-
-  session_options->vad_enabled = rtc_options.voice_activity_detection;
-  session_options->transport_options.ice_restart = rtc_options.ice_restart;
-  session_options->bundle_enabled = rtc_options.use_rtp_mux;
-
-  session_options->bundle_enabled = EvaluateNeedForBundle(*session_options);
-  return true;
-}
-
-bool MediaStreamSignaling::GetOptionsForAnswer(
-    const MediaConstraintsInterface* constraints,
-    cricket::MediaSessionOptions* options) {
-  SetStreams(options, local_streams_, rtp_data_channels_);
-
-  options->recv_audio = false;
-  options->recv_video = false;
-  if (!ParseConstraintsForAnswer(constraints, options)) {
-    return false;
-  }
-  options->bundle_enabled = EvaluateNeedForBundle(*options);
-  return true;
-}
-
-// Updates or creates remote MediaStream objects given a
-// remote SessionDesription.
-// If the remote SessionDesription contains new remote MediaStreams
-// the observer OnAddStream method is called. If a remote MediaStream is missing
-// from the remote SessionDescription OnRemoveStream is called.
-void MediaStreamSignaling::OnRemoteDescriptionChanged(
-    const SessionDescriptionInterface* desc) {
-  const cricket::SessionDescription* remote_desc = desc->description();
-  rtc::scoped_refptr<StreamCollection> new_streams(
-      StreamCollection::Create());
-
-  // Find all audio rtp streams and create corresponding remote AudioTracks
-  // and MediaStreams.
-  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
-  if (audio_content) {
-    const cricket::AudioContentDescription* desc =
-        static_cast<const cricket::AudioContentDescription*>(
-            audio_content->description);
-    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
-    remote_info_.default_audio_track_needed =
-        MediaContentDirectionHasSend(desc->direction()) &&
-            desc->streams().empty();
-  }
-
-  // Find all video rtp streams and create corresponding remote VideoTracks
-  // and MediaStreams.
-  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
-  if (video_content) {
-    const cricket::VideoContentDescription* desc =
-        static_cast<const cricket::VideoContentDescription*>(
-            video_content->description);
-    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
-    remote_info_.default_video_track_needed =
-        MediaContentDirectionHasSend(desc->direction()) &&
-            desc->streams().empty();
-  }
-
-  // Update the DataChannels with the information from the remote peer.
-  const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc);
-  if (data_content) {
-    const cricket::DataContentDescription* data_desc =
-        static_cast<const cricket::DataContentDescription*>(
-            data_content->description);
-    if (rtc::starts_with(
-            data_desc->protocol().data(), cricket::kMediaProtocolRtpPrefix)) {
-      UpdateRemoteRtpDataChannels(data_desc->streams());
-    }
-  }
-
-  // Iterate new_streams and notify the observer about new MediaStreams.
-  for (size_t i = 0; i < new_streams->count(); ++i) {
-    MediaStreamInterface* new_stream = new_streams->at(i);
-    stream_observer_->OnAddRemoteStream(new_stream);
-  }
-
-  // Find removed MediaStreams.
-  if (remote_info_.IsDefaultMediaStreamNeeded() &&
-      remote_streams_->find(kDefaultStreamLabel) != NULL) {
-    // The default media stream already exists. No need to do anything.
-  } else {
-    UpdateEndedRemoteMediaStreams();
-    remote_info_.msid_supported |= remote_streams_->count() > 0;
-  }
-  MaybeCreateDefaultStream();
-}
-
-void MediaStreamSignaling::OnLocalDescriptionChanged(
-    const SessionDescriptionInterface* desc) {
-  const cricket::ContentInfo* audio_content =
-      GetFirstAudioContent(desc->description());
-  if (audio_content) {
-    if (audio_content->rejected) {
-      RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
-    }
-    const cricket::AudioContentDescription* audio_desc =
-        static_cast<const cricket::AudioContentDescription*>(
-            audio_content->description);
-    UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
-  }
-
-  const cricket::ContentInfo* video_content =
-      GetFirstVideoContent(desc->description());
-  if (video_content) {
-    if (video_content->rejected) {
-      RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
-    }
-    const cricket::VideoContentDescription* video_desc =
-        static_cast<const cricket::VideoContentDescription*>(
-            video_content->description);
-    UpdateLocalTracks(video_desc->streams(), video_desc->type());
-  }
-
-  const cricket::ContentInfo* data_content =
-      GetFirstDataContent(desc->description());
-  if (data_content) {
-    const cricket::DataContentDescription* data_desc =
-        static_cast<const cricket::DataContentDescription*>(
-            data_content->description);
-    if (rtc::starts_with(
-            data_desc->protocol().data(), cricket::kMediaProtocolRtpPrefix)) {
-      UpdateLocalRtpDataChannels(data_desc->streams());
-    }
-  }
-}
-
-void MediaStreamSignaling::OnAudioChannelClose() {
-  RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
-}
-
-void MediaStreamSignaling::OnVideoChannelClose() {
-  RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
-}
-
-void MediaStreamSignaling::OnDataChannelClose() {
-  // Use a temporary copy of the RTP/SCTP DataChannel list because the
-  // DataChannel may callback to us and try to modify the list.
-  RtpDataChannels temp_rtp_dcs;
-  temp_rtp_dcs.swap(rtp_data_channels_);
-  RtpDataChannels::iterator it1 = temp_rtp_dcs.begin();
-  for (; it1 != temp_rtp_dcs.end(); ++it1) {
-    it1->second->OnDataEngineClose();
-  }
-
-  SctpDataChannels temp_sctp_dcs;
-  temp_sctp_dcs.swap(sctp_data_channels_);
-  SctpDataChannels::iterator it2 = temp_sctp_dcs.begin();
-  for (; it2 != temp_sctp_dcs.end(); ++it2) {
-    (*it2)->OnDataEngineClose();
-  }
-}
-
-void MediaStreamSignaling::UpdateRemoteStreamsList(
-    const cricket::StreamParamsVec& streams,
-    cricket::MediaType media_type,
-    StreamCollection* new_streams) {
-  TrackInfos* current_tracks = GetRemoteTracks(media_type);
-
-  // Find removed tracks. Ie tracks where the track id or ssrc don't match the
-  // new StreamParam.
-  TrackInfos::iterator track_it = current_tracks->begin();
-  while (track_it != current_tracks->end()) {
-    const TrackInfo& info = *track_it;
-    const cricket::StreamParams* params =
-        cricket::GetStreamBySsrc(streams, info.ssrc);
-    if (!params || params->id != info.track_id) {
-      OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
-      track_it = current_tracks->erase(track_it);
-    } else {
-      ++track_it;
-    }
-  }
-
-  // Find new and active tracks.
-  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
-       it != streams.end(); ++it) {
-    // The sync_label is the MediaStream label and the |stream.id| is the
-    // track id.
-    const std::string& stream_label = it->sync_label;
-    const std::string& track_id = it->id;
-    uint32_t ssrc = it->first_ssrc();
-
-    rtc::scoped_refptr<MediaStreamInterface> stream =
-        remote_streams_->find(stream_label);
-    if (!stream) {
-      // This is a new MediaStream. Create a new remote MediaStream.
-      stream = remote_stream_factory_->CreateMediaStream(stream_label);
-      remote_streams_->AddStream(stream);
-      new_streams->AddStream(stream);
-    }
-
-    const TrackInfo* track_info = FindTrackInfo(*current_tracks, stream_label,
-                                                track_id);
-    if (!track_info) {
-      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
-      OnRemoteTrackSeen(stream_label, track_id, it->first_ssrc(), media_type);
-    }
-  }
-}
-
-void MediaStreamSignaling::OnRemoteTrackSeen(const std::string& stream_label,
-                                             const std::string& track_id,
-                                             uint32_t ssrc,
-                                             cricket::MediaType media_type) {
-  MediaStreamInterface* stream = remote_streams_->find(stream_label);
-
-  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
-    AudioTrackInterface* audio_track =
-        remote_stream_factory_->AddAudioTrack(stream, track_id);
-    stream_observer_->OnAddRemoteAudioTrack(stream, audio_track, ssrc);
-  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
-    VideoTrackInterface* video_track =
-        remote_stream_factory_->AddVideoTrack(stream, track_id);
-    stream_observer_->OnAddRemoteVideoTrack(stream, video_track, ssrc);
-  } else {
-    ASSERT(false && "Invalid media type");
-  }
-}
-
-void MediaStreamSignaling::OnRemoteTrackRemoved(
-    const std::string& stream_label,
-    const std::string& track_id,
-    cricket::MediaType media_type) {
-  MediaStreamInterface* stream = remote_streams_->find(stream_label);
-
-  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
-    rtc::scoped_refptr<AudioTrackInterface> audio_track =
-        stream->FindAudioTrack(track_id);
-    if (audio_track) {
-      audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
-      stream->RemoveTrack(audio_track);
-      stream_observer_->OnRemoveRemoteAudioTrack(stream, audio_track);
-    }
-  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
-    rtc::scoped_refptr<VideoTrackInterface> video_track =
-        stream->FindVideoTrack(track_id);
-    if (video_track) {
-      video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
-      stream->RemoveTrack(video_track);
-      stream_observer_->OnRemoveRemoteVideoTrack(stream, video_track);
-    }
-  } else {
-    ASSERT(false && "Invalid media type");
-  }
-}
-
-void MediaStreamSignaling::RejectRemoteTracks(cricket::MediaType media_type) {
-  TrackInfos* current_tracks = GetRemoteTracks(media_type);
-  for (TrackInfos::iterator track_it = current_tracks->begin();
-       track_it != current_tracks->end(); ++track_it) {
-    const TrackInfo& info = *track_it;
-    MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
-    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
-      AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
-      // There's no guarantee the track is still available, e.g. the track may
-      // have been removed from the stream by javascript.
-      if (track) {
-        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
-      }
-    }
-    if (media_type == cricket::MEDIA_TYPE_VIDEO) {
-      VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
-      // There's no guarantee the track is still available, e.g. the track may
-      // have been removed from the stream by javascript.
-      if (track) {
-        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
-      }
-    }
-  }
-}
-
-void MediaStreamSignaling::UpdateEndedRemoteMediaStreams() {
-  std::vector<scoped_refptr<MediaStreamInterface> > streams_to_remove;
-  for (size_t i = 0; i < remote_streams_->count(); ++i) {
-    MediaStreamInterface*stream = remote_streams_->at(i);
-    if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
-      streams_to_remove.push_back(stream);
-    }
-  }
-
-  std::vector<scoped_refptr<MediaStreamInterface> >::const_iterator it;
-  for (it = streams_to_remove.begin(); it != streams_to_remove.end(); ++it) {
-    remote_streams_->RemoveStream(*it);
-    stream_observer_->OnRemoveRemoteStream(*it);
-  }
-}
-
-void MediaStreamSignaling::MaybeCreateDefaultStream() {
-  if (!remote_info_.IsDefaultMediaStreamNeeded())
-    return;
-
-  bool default_created = false;
-
-  scoped_refptr<MediaStreamInterface> default_remote_stream =
-      remote_streams_->find(kDefaultStreamLabel);
-  if (default_remote_stream == NULL) {
-    default_created = true;
-    default_remote_stream =
-        remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
-    remote_streams_->AddStream(default_remote_stream);
-  }
-  if (remote_info_.default_audio_track_needed &&
-      default_remote_stream->GetAudioTracks().size() == 0) {
-    remote_audio_tracks_.push_back(TrackInfo(kDefaultStreamLabel,
-                                             kDefaultAudioTrackLabel, 0));
-
-    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
-                      cricket::MEDIA_TYPE_AUDIO);
-  }
-  if (remote_info_.default_video_track_needed &&
-      default_remote_stream->GetVideoTracks().size() == 0) {
-    remote_video_tracks_.push_back(TrackInfo(kDefaultStreamLabel,
-                                             kDefaultVideoTrackLabel, 0));
-    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
-                      cricket::MEDIA_TYPE_VIDEO);
-  }
-  if (default_created) {
-    stream_observer_->OnAddRemoteStream(default_remote_stream);
-  }
-}
-
-MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetRemoteTracks(
-    cricket::MediaType type) {
-  if (type == cricket::MEDIA_TYPE_AUDIO)
-    return &remote_audio_tracks_;
-  else if (type == cricket::MEDIA_TYPE_VIDEO)
-    return &remote_video_tracks_;
-  ASSERT(false && "Unknown MediaType");
-  return NULL;
-}
-
-MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetLocalTracks(
-    cricket::MediaType media_type) {
-  ASSERT(media_type == cricket::MEDIA_TYPE_AUDIO ||
-         media_type == cricket::MEDIA_TYPE_VIDEO);
-
-  return (media_type == cricket::MEDIA_TYPE_AUDIO) ?
-      &local_audio_tracks_ : &local_video_tracks_;
-}
-
-void MediaStreamSignaling::UpdateLocalTracks(
-    const std::vector<cricket::StreamParams>& streams,
-    cricket::MediaType media_type) {
-  TrackInfos* current_tracks = GetLocalTracks(media_type);
-
-  // Find removed tracks. Ie tracks where the track id, stream label or ssrc
-  // don't match the new StreamParam.
-  TrackInfos::iterator track_it = current_tracks->begin();
-  while (track_it != current_tracks->end()) {
-    const TrackInfo& info = *track_it;
-    const cricket::StreamParams* params =
-        cricket::GetStreamBySsrc(streams, info.ssrc);
-    if (!params || params->id != info.track_id ||
-        params->sync_label != info.stream_label) {
-      OnLocalTrackRemoved(info.stream_label, info.track_id, info.ssrc,
-                          media_type);
-      track_it = current_tracks->erase(track_it);
-    } else {
-      ++track_it;
-    }
-  }
-
-  // Find new and active tracks.
-  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
-       it != streams.end(); ++it) {
-    // The sync_label is the MediaStream label and the |stream.id| is the
-    // track id.
-    const std::string& stream_label = it->sync_label;
-    const std::string& track_id = it->id;
-    uint32_t ssrc = it->first_ssrc();
-    const TrackInfo* track_info = FindTrackInfo(*current_tracks,
-                                                stream_label,
-                                                track_id);
-    if (!track_info) {
-      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
-      OnLocalTrackSeen(stream_label, track_id, it->first_ssrc(), media_type);
-    }
-  }
-}
-
-void MediaStreamSignaling::OnLocalTrackSeen(const std::string& stream_label,
-                                            const std::string& track_id,
-                                            uint32_t ssrc,
-                                            cricket::MediaType media_type) {
-  MediaStreamInterface* stream = local_streams_->find(stream_label);
-  if (!stream) {
-    LOG(LS_WARNING) << "An unknown local MediaStream with label "
-                    << stream_label <<  " has been configured.";
-    return;
-  }
-
-  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
-    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
-    if (!audio_track) {
-      LOG(LS_WARNING) << "An unknown local AudioTrack with id , "
-                      << track_id <<  " has been configured.";
-      return;
-    }
-    stream_observer_->OnAddLocalAudioTrack(stream, audio_track, ssrc);
-  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
-    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
-    if (!video_track) {
-      LOG(LS_WARNING) << "An unknown local VideoTrack with id , "
-                      << track_id <<  " has been configured.";
-      return;
-    }
-    stream_observer_->OnAddLocalVideoTrack(stream, video_track, ssrc);
-  } else {
-    ASSERT(false && "Invalid media type");
-  }
-}
-
-void MediaStreamSignaling::OnLocalTrackRemoved(const std::string& stream_label,
-                                               const std::string& track_id,
-                                               uint32_t ssrc,
-                                               cricket::MediaType media_type) {
-  MediaStreamInterface* stream = local_streams_->find(stream_label);
-  if (!stream) {
-    // This is the normal case. Ie RemoveLocalStream has been called and the
-    // SessionDescriptions has been renegotiated.
-    return;
-  }
-  // A track has been removed from the SessionDescription but the MediaStream
-  // is still associated with MediaStreamSignaling. This only occurs if the SDP
-  // doesn't match with the calls to AddLocalStream and RemoveLocalStream.
-
-  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
-    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
-    if (!audio_track) {
-      return;
-    }
-    stream_observer_->OnRemoveLocalAudioTrack(stream, audio_track, ssrc);
-  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
-    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
-    if (!video_track) {
-      return;
-    }
-    stream_observer_->OnRemoveLocalVideoTrack(stream, video_track);
-  } else {
-    ASSERT(false && "Invalid media type.");
-  }
-}
-
-void MediaStreamSignaling::UpdateLocalRtpDataChannels(
-    const cricket::StreamParamsVec& streams) {
-  std::vector<std::string> existing_channels;
-
-  // Find new and active data channels.
-  for (cricket::StreamParamsVec::const_iterator it =streams.begin();
-       it != streams.end(); ++it) {
-    // |it->sync_label| is actually the data channel label. The reason is that
-    // we use the same naming of data channels as we do for
-    // MediaStreams and Tracks.
-    // For MediaStreams, the sync_label is the MediaStream label and the
-    // track label is the same as |streamid|.
-    const std::string& channel_label = it->sync_label;
-    RtpDataChannels::iterator data_channel_it =
-        rtp_data_channels_.find(channel_label);
-    if (!VERIFY(data_channel_it != rtp_data_channels_.end())) {
-      continue;
-    }
-    // Set the SSRC the data channel should use for sending.
-    data_channel_it->second->SetSendSsrc(it->first_ssrc());
-    existing_channels.push_back(data_channel_it->first);
-  }
-
-  UpdateClosingDataChannels(existing_channels, true);
-}
-
-void MediaStreamSignaling::UpdateRemoteRtpDataChannels(
-    const cricket::StreamParamsVec& streams) {
-  std::vector<std::string> existing_channels;
-
-  // Find new and active data channels.
-  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
-       it != streams.end(); ++it) {
-    // The data channel label is either the mslabel or the SSRC if the mslabel
-    // does not exist. Ex a=ssrc:444330170 mslabel:test1.
-    std::string label = it->sync_label.empty() ?
-        rtc::ToString(it->first_ssrc()) : it->sync_label;
-    RtpDataChannels::iterator data_channel_it =
-        rtp_data_channels_.find(label);
-    if (data_channel_it == rtp_data_channels_.end()) {
-      // This is a new data channel.
-      CreateRemoteDataChannel(label, it->first_ssrc());
-    } else {
-      data_channel_it->second->SetReceiveSsrc(it->first_ssrc());
-    }
-    existing_channels.push_back(label);
-  }
-
-  UpdateClosingDataChannels(existing_channels, false);
-}
-
-void MediaStreamSignaling::UpdateClosingDataChannels(
-    const std::vector<std::string>& active_channels, bool is_local_update) {
-  RtpDataChannels::iterator it = rtp_data_channels_.begin();
-  while (it != rtp_data_channels_.end()) {
-    DataChannel* data_channel = it->second;
-    if (std::find(active_channels.begin(), active_channels.end(),
-                  data_channel->label()) != active_channels.end()) {
-      ++it;
-      continue;
-    }
-
-    if (is_local_update)
-      data_channel->SetSendSsrc(0);
-    else
-      data_channel->RemotePeerRequestClose();
-
-    if (data_channel->state() == DataChannel::kClosed) {
-      rtp_data_channels_.erase(it);
-      it = rtp_data_channels_.begin();
-    } else {
-      ++it;
-    }
-  }
-}
-
-void MediaStreamSignaling::CreateRemoteDataChannel(const std::string& label,
-                                                   uint32_t remote_ssrc) {
-  if (!data_channel_factory_) {
-    LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
-                    << "are not supported.";
-    return;
-  }
-  scoped_refptr<DataChannel> channel(
-      data_channel_factory_->CreateDataChannel(label, NULL));
-  if (!channel.get()) {
-    LOG(LS_WARNING) << "Remote peer requested a DataChannel but"
-                    << "CreateDataChannel failed.";
-    return;
-  }
-  channel->SetReceiveSsrc(remote_ssrc);
-  stream_observer_->OnAddDataChannel(channel);
-}
-
-void MediaStreamSignaling::OnDataTransportCreatedForSctp() {
-  SctpDataChannels::iterator it = sctp_data_channels_.begin();
-  for (; it != sctp_data_channels_.end(); ++it) {
-    (*it)->OnTransportChannelCreated();
-  }
-}
-
-void MediaStreamSignaling::OnDtlsRoleReadyForSctp(rtc::SSLRole role) {
-  SctpDataChannels::iterator it = sctp_data_channels_.begin();
-  for (; it != sctp_data_channels_.end(); ++it) {
-    if ((*it)->id() < 0) {
-      int sid;
-      if (!AllocateSctpSid(role, &sid)) {
-        LOG(LS_ERROR) << "Failed to allocate SCTP sid.";
-        continue;
-      }
-      (*it)->SetSctpSid(sid);
-    }
-  }
-}
-
-void MediaStreamSignaling::OnRemoteSctpDataChannelClosed(uint32_t sid) {
-  int index = FindDataChannelBySid(sid);
-  if (index < 0) {
-    LOG(LS_WARNING) << "Unexpected sid " << sid
-                    << " of the remotely closed DataChannel.";
-    return;
-  }
-  sctp_data_channels_[index]->Close();
-}
-
-const MediaStreamSignaling::TrackInfo*
-MediaStreamSignaling::FindTrackInfo(
-    const MediaStreamSignaling::TrackInfos& infos,
-    const std::string& stream_label,
-    const std::string track_id) const {
-
-  for (TrackInfos::const_iterator it = infos.begin();
-      it != infos.end(); ++it) {
-    if (it->stream_label == stream_label && it->track_id == track_id)
-      return &*it;
-  }
-  return NULL;
-}
-
-int MediaStreamSignaling::FindDataChannelBySid(int sid) const {
-  for (size_t i = 0; i < sctp_data_channels_.size(); ++i) {
-    if (sctp_data_channels_[i]->id() == sid) {
-      return static_cast<int>(i);
-    }
-  }
-  return -1;
-}
-
-}  // namespace webrtc
+// TODO(deadbeef): Remove this file once Chrome build files don't reference it.
diff --git a/talk/app/webrtc/mediastreamsignaling.h b/talk/app/webrtc/mediastreamsignaling.h
index b858b5b..e8c5c11 100644
--- a/talk/app/webrtc/mediastreamsignaling.h
+++ b/talk/app/webrtc/mediastreamsignaling.h
@@ -25,379 +25,4 @@
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-#ifndef TALK_APP_WEBRTC_MEDIASTREAMSIGNALING_H_
-#define TALK_APP_WEBRTC_MEDIASTREAMSIGNALING_H_
-
-#include <map>
-#include <string>
-#include <vector>
-
-#include "talk/app/webrtc/datachannel.h"
-#include "talk/app/webrtc/mediastream.h"
-#include "talk/app/webrtc/peerconnectioninterface.h"
-#include "talk/app/webrtc/streamcollection.h"
-#include "talk/session/media/mediasession.h"
-#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/base/sigslot.h"
-
-namespace rtc {
-class Thread;
-}  // namespace rtc
-
-namespace webrtc {
-
-class RemoteMediaStreamFactory;
-
-// A MediaStreamSignalingObserver is notified when events happen to
-// MediaStreams, MediaStreamTracks or DataChannels associated with the observed
-// MediaStreamSignaling object. The notifications identify the stream, track or
-// channel.
-class MediaStreamSignalingObserver {
- public:
-  // Triggered when the remote SessionDescription has a new stream.
-  virtual void OnAddRemoteStream(MediaStreamInterface* stream) = 0;
-
-  // Triggered when the remote SessionDescription removes a stream.
-  virtual void OnRemoveRemoteStream(MediaStreamInterface* stream) = 0;
-
-  // Triggered when the remote SessionDescription has a new data channel.
-  virtual void OnAddDataChannel(DataChannelInterface* data_channel) = 0;
-
-  // Triggered when the remote SessionDescription has a new audio track.
-  virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
-                                     AudioTrackInterface* audio_track,
-                                     uint32_t ssrc) = 0;
-
-  // Triggered when the remote SessionDescription has a new video track.
-  virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
-                                     VideoTrackInterface* video_track,
-                                     uint32_t ssrc) = 0;
-
-  // Triggered when the remote SessionDescription has removed an audio track.
-  virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
-                                        AudioTrackInterface* audio_track)  = 0;
-
-  // Triggered when the remote SessionDescription has removed a video track.
-  virtual void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream,
-                                        VideoTrackInterface* video_track) = 0;
-
-  // Triggered when the local SessionDescription has a new audio track.
-  virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream,
-                                    AudioTrackInterface* audio_track,
-                                    uint32_t ssrc) = 0;
-
-  // Triggered when the local SessionDescription has a new video track.
-  virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream,
-                                    VideoTrackInterface* video_track,
-                                    uint32_t ssrc) = 0;
-
-  // Triggered when the local SessionDescription has removed an audio track.
-  virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
-                                       AudioTrackInterface* audio_track,
-                                       uint32_t ssrc) = 0;
-
-  // Triggered when the local SessionDescription has removed a video track.
-  virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
-                                       VideoTrackInterface* video_track) = 0;
-
-  // Triggered when RemoveLocalStream is called. |stream| is no longer used
-  // when negotiating and all tracks in |stream| should stop providing data to
-  // this PeerConnection. This doesn't mean that the local session description
-  // has changed and OnRemoveLocalAudioTrack and OnRemoveLocalVideoTrack is not
-  // called for each individual track.
-  virtual void OnRemoveLocalStream(MediaStreamInterface* stream) = 0;
-
- protected:
-  ~MediaStreamSignalingObserver() {}
-};
-
-// MediaStreamSignaling works as a glue between MediaStreams and a cricket
-// classes for SessionDescriptions.
-// It is used for creating cricket::MediaSessionOptions given the local
-// MediaStreams and data channels.
-//
-// It is responsible for creating remote MediaStreams given a remote
-// SessionDescription and creating cricket::MediaSessionOptions given
-// local MediaStreams.
-//
-// To signal that a DataChannel should be established:
-// 1. Call AddDataChannel with the new DataChannel. Next time
-//    GetMediaSessionOptions will include the description of the DataChannel.
-// 2. When a local session description is set, call UpdateLocalStreams with the
-//    session description. This will set the SSRC used for sending data on
-//    this DataChannel.
-// 3. When remote session description is set, call UpdateRemoteStream with the
-//    session description. If the DataChannel label and a SSRC is included in
-//    the description, the DataChannel is updated with SSRC that will be used
-//    for receiving data.
-// 4. When both the local and remote SSRC of a DataChannel is set the state of
-//    the DataChannel change to kOpen.
-//
-// To setup a DataChannel initialized by the remote end.
-// 1. When remote session description is set, call UpdateRemoteStream with the
-//    session description. If a label and a SSRC of a new DataChannel is found
-//    MediaStreamSignalingObserver::OnAddDataChannel with the label and SSRC is
-//    triggered.
-// 2. Create a DataChannel instance with the label and set the remote SSRC.
-// 3. Call AddDataChannel with this new DataChannel.  GetMediaSessionOptions
-//    will include the description of the DataChannel.
-// 4. Create a local session description and call UpdateLocalStreams. This will
-//    set the local SSRC used by the DataChannel.
-// 5. When both the local and remote SSRC of a DataChannel is set the state of
-//    the DataChannel change to kOpen.
-//
-// To close a DataChannel:
-// 1. Call DataChannel::Close. This will change the state of the DataChannel to
-//    kClosing. GetMediaSessionOptions will not
-//    include the description of the DataChannel.
-// 2. When a local session description is set, call UpdateLocalStreams with the
-//    session description. The description will no longer contain the
-//    DataChannel label or SSRC.
-// 3. When remote session description is set, call UpdateRemoteStream with the
-//    session description. The description will no longer contain the
-//    DataChannel label or SSRC. The DataChannel SSRC is updated with SSRC=0.
-//    The DataChannel change state to kClosed.
-
-class MediaStreamSignaling : public sigslot::has_slots<> {
- public:
-  typedef std::map<std::string, rtc::scoped_refptr<DataChannel> >
-      RtpDataChannels;
-  typedef std::vector<rtc::scoped_refptr<DataChannel>> SctpDataChannels;
-
-  MediaStreamSignaling(rtc::Thread* signaling_thread,
-                       MediaStreamSignalingObserver* stream_observer,
-                       cricket::ChannelManager* channel_manager);
-  virtual ~MediaStreamSignaling();
-
-  // Notify all referenced objects that MediaStreamSignaling will be teared
-  // down. This method must be called prior to the dtor.
-  void TearDown();
-
-  // Set a factory for creating data channels that are initiated by the remote
-  // peer.
-  void SetDataChannelFactory(DataChannelFactory* data_channel_factory) {
-    data_channel_factory_ = data_channel_factory;
-  }
-
-  // Checks if |id| is available to be assigned to a new SCTP data channel.
-  bool IsSctpSidAvailable(int sid) const;
-
-  // Gets the first available SCTP id that is not assigned to any existing
-  // data channels.
-  bool AllocateSctpSid(rtc::SSLRole role, int* sid);
-
-  // Adds |local_stream| to the collection of known MediaStreams that will be
-  // offered in a SessionDescription.
-  bool AddLocalStream(MediaStreamInterface* local_stream);
-
-  // Removes |local_stream| from the collection of known MediaStreams that will
-  // be offered in a SessionDescription.
-  void RemoveLocalStream(MediaStreamInterface* local_stream);
-
-  // Checks if any data channel has been added.
-  bool HasDataChannels() const;
-  // Adds |data_channel| to the collection of DataChannels that will be
-  // offered in a SessionDescription.
-  bool AddDataChannel(DataChannel* data_channel);
-  // After we receive an OPEN message, create a data channel and add it.
-  bool AddDataChannelFromOpenMessage(const cricket::ReceiveDataParams& params,
-                                     const rtc::Buffer& payload);
-  void RemoveSctpDataChannel(int sid);
-
-  // Returns a MediaSessionOptions struct with options decided by
-  // |rtc_options|, the local MediaStreams and DataChannels.
-  virtual bool GetOptionsForOffer(
-      const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
-      cricket::MediaSessionOptions* session_options);
-
-  // Returns a MediaSessionOptions struct with options decided by
-  // |constraints|, the local MediaStreams and DataChannels.
-  virtual bool GetOptionsForAnswer(
-      const MediaConstraintsInterface* constraints,
-      cricket::MediaSessionOptions* options);
-
-  // Called when the remote session description has changed. The purpose is to
-  // update remote MediaStreams and DataChannels with the current
-  // session state.
-  // If the remote SessionDescription contains information about a new remote
-  // MediaStream, a new remote MediaStream is created and
-  // MediaStreamSignalingObserver::OnAddRemoteStream is called.
-  // If a remote MediaStream is missing from the remote SessionDescription,
-  // MediaStreamSignalingObserver::OnRemoveRemoteStream is called.
-  // If the SessionDescription contains information about a new DataChannel,
-  // MediaStreamSignalingObserver::OnAddDataChannel is called with the
-  // DataChannel.
-  void OnRemoteDescriptionChanged(const SessionDescriptionInterface* desc);
-
-  // Called when the local session description has changed. The purpose is to
-  // update local and remote MediaStreams and DataChannels with the current
-  // session state.
-  // If |desc| indicates that the media type should be rejected, the method
-  // ends the remote MediaStreamTracks.
-  // It also updates local DataChannels with information about their local
-  // SSRCs.
-  void OnLocalDescriptionChanged(const SessionDescriptionInterface* desc);
-
-  // Called when the audio channel closes.
-  void OnAudioChannelClose();
-  // Called when the video channel closes.
-  void OnVideoChannelClose();
-  // Called when the data channel closes.
-  void OnDataChannelClose();
-
-  // Returns all current known local MediaStreams.
-  StreamCollectionInterface* local_streams() const { return local_streams_; }
-
-  // Returns all current remote MediaStreams.
-  StreamCollectionInterface* remote_streams() const {
-    return remote_streams_.get();
-  }
-  void OnDataTransportCreatedForSctp();
-  void OnDtlsRoleReadyForSctp(rtc::SSLRole role);
-  void OnRemoteSctpDataChannelClosed(uint32_t sid);
-
-  const SctpDataChannels& sctp_data_channels() const {
-    return sctp_data_channels_;
-  }
-
- private:
-  struct RemotePeerInfo {
-    RemotePeerInfo()
-        : msid_supported(false),
-          default_audio_track_needed(false),
-          default_video_track_needed(false) {
-    }
-    // True if it has been discovered that the remote peer supports MSID.
-    bool msid_supported;
-    // The remote peer indicates in the session description that audio will be
-    // sent but no MSID is given.
-    bool default_audio_track_needed;
-    // The remote peer indicates in the session description that video will be
-    // sent but no MSID is given.
-    bool default_video_track_needed;
-
-    bool IsDefaultMediaStreamNeeded() {
-      return !msid_supported && (default_audio_track_needed ||
-          default_video_track_needed);
-    }
-  };
-
-  struct TrackInfo {
-    TrackInfo() : ssrc(0) {}
-    TrackInfo(const std::string& stream_label,
-              const std::string track_id,
-              uint32_t ssrc)
-        : stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
-    std::string stream_label;
-    std::string track_id;
-    uint32_t ssrc;
-  };
-  typedef std::vector<TrackInfo> TrackInfos;
-
-  // Makes sure a MediaStreamTrack is created for each StreamParam in
-  // |streams|. |media_type| is the type of the |streams| and can be either
-  // audio or video.
-  // If a new MediaStream is created it is added to |new_streams|.
-  void UpdateRemoteStreamsList(
-      const std::vector<cricket::StreamParams>& streams,
-      cricket::MediaType media_type,
-      StreamCollection* new_streams);
-
-  // Triggered when a remote track has been seen for the first time in a remote
-  // session description. It creates a remote MediaStreamTrackInterface
-  // implementation and triggers
-  // MediaStreamSignalingObserver::OnAddRemoteAudioTrack or
-  // MediaStreamSignalingObserver::OnAddRemoteVideoTrack.
-  void OnRemoteTrackSeen(const std::string& stream_label,
-                         const std::string& track_id,
-                         uint32_t ssrc,
-                         cricket::MediaType media_type);
-
-  // Triggered when a remote track has been removed from a remote session
-  // description. It removes the remote track with id |track_id| from a remote
-  // MediaStream and triggers
-  // MediaStreamSignalingObserver::OnRemoveRemoteAudioTrack or
-  // MediaStreamSignalingObserver::OnRemoveRemoteVideoTrack.
-  void OnRemoteTrackRemoved(const std::string& stream_label,
-                            const std::string& track_id,
-                            cricket::MediaType media_type);
-
-  // Set the MediaStreamTrackInterface::TrackState to |kEnded| on all remote
-  // tracks of type |media_type|.
-  void RejectRemoteTracks(cricket::MediaType media_type);
-
-  // Finds remote MediaStreams without any tracks, removes them from
-  // |remote_streams_|, and notifies the observer that the MediaStreams no
-  // longer exist.
-  void UpdateEndedRemoteMediaStreams();
-  void MaybeCreateDefaultStream();
-  TrackInfos* GetRemoteTracks(cricket::MediaType type);
-
-  // Returns the currently negotiated TrackInfos for local tracks of type
-  // |type|.
-  TrackInfos* GetLocalTracks(cricket::MediaType type);
-  bool FindLocalTrack(const std::string& track_id, cricket::MediaType type);
-
-  // Loops through the vector of |streams| and finds StreamParams that have
-  // been added or removed since the last time this method was called.
-  // For each new or removed StreamParam, OnLocalTrackSeen or
-  // OnLocalTrackRemoved is invoked.
-  void UpdateLocalTracks(const std::vector<cricket::StreamParams>& streams,
-                         cricket::MediaType media_type);
-
-  // Triggered when a local track has been seen for the first time in a local
-  // session description.
-  // This method triggers MediaStreamSignalingObserver::OnAddLocalAudioTrack or
-  // MediaStreamSignalingObserver::OnAddLocalVideoTrack if the RTP streams in
-  // the local SessionDescription can be mapped to a MediaStreamTrack in a
-  // MediaStream in |local_streams_|.
-  void OnLocalTrackSeen(const std::string& stream_label,
-                        const std::string& track_id,
-                        uint32_t ssrc,
-                        cricket::MediaType media_type);
-
-  // Triggered when a local track has been removed from a local session
-  // description.
-  // This method triggers MediaStreamSignalingObserver::OnRemoveLocalAudioTrack
-  // or MediaStreamSignalingObserver::OnRemoveLocalVideoTrack if a stream has
-  // been removed from the local SessionDescription and the stream can be
-  // mapped to a MediaStreamTrack in a MediaStream in |local_streams_|.
-  void OnLocalTrackRemoved(const std::string& stream_label,
-                           const std::string& track_id,
-                           uint32_t ssrc,
-                           cricket::MediaType media_type);
-
-  void UpdateLocalRtpDataChannels(const cricket::StreamParamsVec& streams);
-  void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
-  void UpdateClosingDataChannels(
-      const std::vector<std::string>& active_channels, bool is_local_update);
-  void CreateRemoteDataChannel(const std::string& label, uint32_t remote_ssrc);
-
-  const TrackInfo* FindTrackInfo(const TrackInfos& infos,
-                                 const std::string& stream_label,
-                                 const std::string track_id) const;
-
-  // Returns the index of the specified SCTP DataChannel in sctp_data_channels_,
-  // or -1 if not found.
-  int FindDataChannelBySid(int sid) const;
-
-  RemotePeerInfo remote_info_;
-  rtc::Thread* signaling_thread_;
-  DataChannelFactory* data_channel_factory_;
-  MediaStreamSignalingObserver* stream_observer_;
-  rtc::scoped_refptr<StreamCollection> local_streams_;
-  rtc::scoped_refptr<StreamCollection> remote_streams_;
-  rtc::scoped_ptr<RemoteMediaStreamFactory> remote_stream_factory_;
-
-  TrackInfos remote_audio_tracks_;
-  TrackInfos remote_video_tracks_;
-  TrackInfos local_audio_tracks_;
-  TrackInfos local_video_tracks_;
-
-  int last_allocated_sctp_even_sid_;
-  int last_allocated_sctp_odd_sid_;
-
-  RtpDataChannels rtp_data_channels_;
-  SctpDataChannels sctp_data_channels_;
-};
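For orientation (not part of this change), a hedged sketch of how a caller might use the SCTP id helpers declared above. PickSidForNewChannel is a hypothetical helper introduced only for illustration, and the even/odd parity convention (even ids for SSL_CLIENT, odd ids for SSL_SERVER) is the one the unit test at the end of this section exercises:

// Hypothetical caller-side sketch; assumes the relevant headers are included,
// a valid |signaling| instance, and an InternalDataChannelInit |config| for
// the channel being created. Illustrative only.
bool PickSidForNewChannel(webrtc::MediaStreamSignaling* signaling,
                          rtc::SSLRole role,
                          webrtc::InternalDataChannelInit* config) {
  if (config->id >= 0) {
    // An explicit id was requested; accept it only if it is still free.
    return signaling->IsSctpSidAvailable(config->id);
  }
  // Otherwise take the first free id matching this peer's parity:
  // even ids for rtc::SSL_CLIENT, odd ids for rtc::SSL_SERVER.
  return signaling->AllocateSctpSid(role, &config->id);
}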
-
-}  // namespace webrtc
-
-#endif  // TALK_APP_WEBRTC_MEDIASTREAMSIGNALING_H_
+// TODO(deadbeef): Remove this file once Chrome build files don't reference it.
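For reference, a minimal sketch of the locally initiated DataChannel flow described in the removed header comment above. It is illustrative only, not code from this change; |signaling|, |data_channel| and the session-description pointers are assumed to come from the surrounding PeerConnection machinery, and error handling and threading concerns are omitted.

// Illustrative sketch of the locally initiated (RTP) DataChannel flow using
// the removed MediaStreamSignaling API; not code from this change.
void OfferLocalDataChannel(
    webrtc::MediaStreamSignaling* signaling,
    webrtc::DataChannel* data_channel,
    const webrtc::SessionDescriptionInterface* local_desc,
    const webrtc::SessionDescriptionInterface* remote_desc) {
  // 1. Register the channel so the next offer describes it.
  signaling->AddDataChannel(data_channel);

  // 2. Build the session options that the offer will be generated from.
  webrtc::PeerConnectionInterface::RTCOfferAnswerOptions rtc_options;
  cricket::MediaSessionOptions options;
  signaling->GetOptionsForOffer(rtc_options, &options);

  // 3. Applying the local description sets the SSRC used for sending.
  signaling->OnLocalDescriptionChanged(local_desc);

  // 4. Applying the remote description sets the receiving SSRC; once both
  //    SSRCs are known the DataChannel transitions to kOpen.
  signaling->OnRemoteDescriptionChanged(remote_desc);
}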
diff --git a/talk/app/webrtc/mediastreamsignaling_unittest.cc b/talk/app/webrtc/mediastreamsignaling_unittest.cc
deleted file mode 100644
index 2333705..0000000
--- a/talk/app/webrtc/mediastreamsignaling_unittest.cc
+++ /dev/null
@@ -1,1341 +0,0 @@
-/*
- * libjingle
- * Copyright 2012 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- *  1. Redistributions of source code must retain the above copyright notice,
- *     this list of conditions and the following disclaimer.
- *  2. Redistributions in binary form must reproduce the above copyright notice,
- *     this list of conditions and the following disclaimer in the documentation
- *     and/or other materials provided with the distribution.
- *  3. The name of the author may not be used to endorse or promote products
- *     derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include <string>
-#include <vector>
-
-#include "talk/app/webrtc/audiotrack.h"
-#include "talk/app/webrtc/mediastream.h"
-#include "talk/app/webrtc/mediastreamsignaling.h"
-#include "talk/app/webrtc/sctputils.h"
-#include "talk/app/webrtc/streamcollection.h"
-#include "talk/app/webrtc/test/fakeconstraints.h"
-#include "talk/app/webrtc/test/fakedatachannelprovider.h"
-#include "talk/app/webrtc/videotrack.h"
-#include "talk/media/base/fakemediaengine.h"
-#include "webrtc/p2p/base/constants.h"
-#include "webrtc/p2p/base/sessiondescription.h"
-#include "talk/session/media/channelmanager.h"
-#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/stringutils.h"
-#include "webrtc/base/thread.h"
-
-static const char kStreams[][8] = {"stream1", "stream2"};
-static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
-static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};
-
-using webrtc::AudioTrack;
-using webrtc::AudioTrackInterface;
-using webrtc::AudioTrackVector;
-using webrtc::VideoTrack;
-using webrtc::VideoTrackInterface;
-using webrtc::VideoTrackVector;
-using webrtc::DataChannelInterface;
-using webrtc::FakeConstraints;
-using webrtc::IceCandidateInterface;
-using webrtc::MediaConstraintsInterface;
-using webrtc::MediaStreamInterface;
-using webrtc::MediaStreamTrackInterface;
-using webrtc::PeerConnectionInterface;
-using webrtc::SdpParseError;
-using webrtc::SessionDescriptionInterface;
-using webrtc::StreamCollection;
-using webrtc::StreamCollectionInterface;
-
-typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
-
-// Reference SDP with a MediaStream with label "stream1", an audio track with
-// id "audiotrack0" and a video track with id "videotrack0".
-static const char kSdpStringWithStream1[] =
-    "v=0\r\n"
-    "o=- 0 0 IN IP4 127.0.0.1\r\n"
-    "s=-\r\n"
-    "t=0 0\r\n"
-    "m=audio 1 RTP/AVPF 103\r\n"
-    "a=mid:audio\r\n"
-    "a=rtpmap:103 ISAC/16000\r\n"
-    "a=ssrc:1 cname:stream1\r\n"
-    "a=ssrc:1 mslabel:stream1\r\n"
-    "a=ssrc:1 label:audiotrack0\r\n"
-    "m=video 1 RTP/AVPF 120\r\n"
-    "a=mid:video\r\n"
-    "a=rtpmap:120 VP8/90000\r\n"
-    "a=ssrc:2 cname:stream1\r\n"
-    "a=ssrc:2 mslabel:stream1\r\n"
-    "a=ssrc:2 label:videotrack0\r\n";
-
-// Reference SDP with two MediaStreams with labels "stream1" and "stream2".
-// Each MediaStream has one audio track and one video track.
-// This uses MSID.
-static const char kSdpStringWith2Stream[] =
-    "v=0\r\n"
-    "o=- 0 0 IN IP4 127.0.0.1\r\n"
-    "s=-\r\n"
-    "t=0 0\r\n"
-    "a=msid-semantic: WMS stream1 stream2\r\n"
-    "m=audio 1 RTP/AVPF 103\r\n"
-    "a=mid:audio\r\n"
-    "a=rtpmap:103 ISAC/16000\r\n"
-    "a=ssrc:1 cname:stream1\r\n"
-    "a=ssrc:1 msid:stream1 audiotrack0\r\n"
-    "a=ssrc:3 cname:stream2\r\n"
-    "a=ssrc:3 msid:stream2 audiotrack1\r\n"
-    "m=video 1 RTP/AVPF 120\r\n"
-    "a=mid:video\r\n"
-    "a=rtpmap:120 VP8/0\r\n"
-    "a=ssrc:2 cname:stream1\r\n"
-    "a=ssrc:2 msid:stream1 videotrack0\r\n"
-    "a=ssrc:4 cname:stream2\r\n"
-    "a=ssrc:4 msid:stream2 videotrack1\r\n";
-
-// Reference SDP without MediaStreams. Msid is not supported.
-static const char kSdpStringWithoutStreams[] =
-    "v=0\r\n"
-    "o=- 0 0 IN IP4 127.0.0.1\r\n"
-    "s=-\r\n"
-    "t=0 0\r\n"
-    "m=audio 1 RTP/AVPF 103\r\n"
-    "a=mid:audio\r\n"
-    "a=rtpmap:103 ISAC/16000\r\n"
-    "m=video 1 RTP/AVPF 120\r\n"
-    "a=mid:video\r\n"
-    "a=rtpmap:120 VP8/90000\r\n";
-
-// Reference SDP without MediaStreams. Msid is supported.
-static const char kSdpStringWithMsidWithoutStreams[] =
-    "v=0\r\n"
-    "o=- 0 0 IN IP4 127.0.0.1\r\n"
-    "s=-\r\n"
-    "t=0 0\r\n"
-    "a=msid-semantic: WMS\r\n"
-    "m=audio 1 RTP/AVPF 103\r\n"
-    "a=mid:audio\r\n"
-    "a=rtpmap:103 ISAC/16000\r\n"
-    "m=video 1 RTP/AVPF 120\r\n"
-    "a=mid:video\r\n"
-    "a=rtpmap:120 VP8/90000\r\n";
-
-// Reference SDP without MediaStreams and audio only.
-static const char kSdpStringWithoutStreamsAudioOnly[] =
-    "v=0\r\n"
-    "o=- 0 0 IN IP4 127.0.0.1\r\n"
-    "s=-\r\n"
-    "t=0 0\r\n"
-    "m=audio 1 RTP/AVPF 103\r\n"
-    "a=mid:audio\r\n"
-    "a=rtpmap:103 ISAC/16000\r\n";
-
-// Reference SENDONLY SDP without MediaStreams. Msid is not supported.
-static const char kSdpStringSendOnlyWithWithoutStreams[] =
-    "v=0\r\n"
-    "o=- 0 0 IN IP4 127.0.0.1\r\n"
-    "s=-\r\n"
-    "t=0 0\r\n"
-    "m=audio 1 RTP/AVPF 103\r\n"
-    "a=mid:audio\r\n"
-    "a=sendonly"
-    "a=rtpmap:103 ISAC/16000\r\n"
-    "m=video 1 RTP/AVPF 120\r\n"
-    "a=mid:video\r\n"
-    "a=sendonly"
-    "a=rtpmap:120 VP8/90000\r\n";
-
-static const char kSdpStringInit[] =
-    "v=0\r\n"
-    "o=- 0 0 IN IP4 127.0.0.1\r\n"
-    "s=-\r\n"
-    "t=0 0\r\n"
-    "a=msid-semantic: WMS\r\n";
-
-static const char kSdpStringAudio[] =
-    "m=audio 1 RTP/AVPF 103\r\n"
-    "a=mid:audio\r\n"
-    "a=rtpmap:103 ISAC/16000\r\n";
-
-static const char kSdpStringVideo[] =
-    "m=video 1 RTP/AVPF 120\r\n"
-    "a=mid:video\r\n"
-    "a=rtpmap:120 VP8/90000\r\n";
-
-static const char kSdpStringMs1Audio0[] =
-    "a=ssrc:1 cname:stream1\r\n"
-    "a=ssrc:1 msid:stream1 audiotrack0\r\n";
-
-static const char kSdpStringMs1Video0[] =
-    "a=ssrc:2 cname:stream1\r\n"
-    "a=ssrc:2 msid:stream1 videotrack0\r\n";
-
-static const char kSdpStringMs1Audio1[] =
-    "a=ssrc:3 cname:stream1\r\n"
-    "a=ssrc:3 msid:stream1 audiotrack1\r\n";
-
-static const char kSdpStringMs1Video1[] =
-    "a=ssrc:4 cname:stream1\r\n"
-    "a=ssrc:4 msid:stream1 videotrack1\r\n";
-
-// Verifies that |options| contains all tracks in |collection| and that
-// |options| has the has_audio and has_video flags set correctly.
-static void VerifyMediaOptions(StreamCollectionInterface* collection,
-                               const cricket::MediaSessionOptions& options) {
-  if (!collection) {
-    return;
-  }
-
-  size_t stream_index = 0;
-  for (size_t i = 0; i < collection->count(); ++i) {
-    MediaStreamInterface* stream = collection->at(i);
-    AudioTrackVector audio_tracks = stream->GetAudioTracks();
-    ASSERT_GE(options.streams.size(), stream_index + audio_tracks.size());
-    for (size_t j = 0; j < audio_tracks.size(); ++j) {
-      webrtc::AudioTrackInterface* audio = audio_tracks[j];
-      EXPECT_EQ(options.streams[stream_index].sync_label, stream->label());
-      EXPECT_EQ(options.streams[stream_index++].id, audio->id());
-      EXPECT_TRUE(options.has_audio());
-    }
-    VideoTrackVector video_tracks = stream->GetVideoTracks();
-    ASSERT_GE(options.streams.size(), stream_index + video_tracks.size());
-    for (size_t j = 0; j < video_tracks.size(); ++j) {
-      webrtc::VideoTrackInterface* video = video_tracks[j];
-      EXPECT_EQ(options.streams[stream_index].sync_label, stream->label());
-      EXPECT_EQ(options.streams[stream_index++].id, video->id());
-      EXPECT_TRUE(options.has_video());
-    }
-  }
-}
-
-static bool CompareStreamCollections(StreamCollectionInterface* s1,
-                                     StreamCollectionInterface* s2) {
-  if (s1 == NULL || s2 == NULL || s1->count() != s2->count())
-    return false;
-
-  for (size_t i = 0; i != s1->count(); ++i) {
-    if (s1->at(i)->label() != s2->at(i)->label())
-      return false;
-    webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks();
-    webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks();
-    webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks();
-    webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks();
-
-    if (audio_tracks1.size() != audio_tracks2.size())
-      return false;
-    for (size_t j = 0; j != audio_tracks1.size(); ++j) {
-       if (audio_tracks1[j]->id() != audio_tracks2[j]->id())
-         return false;
-    }
-    if (video_tracks1.size() != video_tracks2.size())
-      return false;
-    for (size_t j = 0; j != video_tracks1.size(); ++j) {
-      if (video_tracks1[j]->id() != video_tracks2[j]->id())
-        return false;
-    }
-  }
-  return true;
-}
-
-class FakeDataChannelFactory : public webrtc::DataChannelFactory {
- public:
-  FakeDataChannelFactory(FakeDataChannelProvider* provider,
-                         cricket::DataChannelType dct,
-                         webrtc::MediaStreamSignaling* media_stream_signaling)
-      : provider_(provider),
-        type_(dct),
-        media_stream_signaling_(media_stream_signaling) {}
-
-  virtual rtc::scoped_refptr<webrtc::DataChannel> CreateDataChannel(
-      const std::string& label,
-      const webrtc::InternalDataChannelInit* config) {
-    last_init_ = *config;
-    rtc::scoped_refptr<webrtc::DataChannel> data_channel =
-        webrtc::DataChannel::Create(provider_, type_, label, *config);
-    media_stream_signaling_->AddDataChannel(data_channel);
-    return data_channel;
-  }
-
-  const webrtc::InternalDataChannelInit& last_init() const {
-      return last_init_;
-  }
-
- private:
-  FakeDataChannelProvider* provider_;
-  cricket::DataChannelType type_;
-  webrtc::MediaStreamSignaling* media_stream_signaling_;
-  webrtc::InternalDataChannelInit last_init_;
-};
-
-class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
- public:
-  MockSignalingObserver()
-      : remote_media_streams_(StreamCollection::Create()) {
-  }
-
-  virtual ~MockSignalingObserver() {
-  }
-
-  // A new remote stream has been discovered.
-  virtual void OnAddRemoteStream(MediaStreamInterface* remote_stream) {
-    remote_media_streams_->AddStream(remote_stream);
-  }
-
-  // Remote stream is no longer available.
-  virtual void OnRemoveRemoteStream(MediaStreamInterface* remote_stream) {
-    remote_media_streams_->RemoveStream(remote_stream);
-  }
-
-  virtual void OnAddDataChannel(DataChannelInterface* data_channel) {
-  }
-
-  virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream,
-                                    AudioTrackInterface* audio_track,
-                                    uint32_t ssrc) {
-    AddTrack(&local_audio_tracks_, stream, audio_track, ssrc);
-  }
-
-  virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream,
-                                    VideoTrackInterface* video_track,
-                                    uint32_t ssrc) {
-    AddTrack(&local_video_tracks_, stream, video_track, ssrc);
-  }
-
-  virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
-                                       AudioTrackInterface* audio_track,
-                                       uint32_t ssrc) {
-    RemoveTrack(&local_audio_tracks_, stream, audio_track);
-  }
-
-  virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
-                                       VideoTrackInterface* video_track) {
-    RemoveTrack(&local_video_tracks_, stream, video_track);
-  }
-
-  virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
-                                     AudioTrackInterface* audio_track,
-                                     uint32_t ssrc) {
-    AddTrack(&remote_audio_tracks_, stream, audio_track, ssrc);
-  }
-
-  virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
-                                     VideoTrackInterface* video_track,
-                                     uint32_t ssrc) {
-    AddTrack(&remote_video_tracks_, stream, video_track, ssrc);
-  }
-
-  virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
-                                        AudioTrackInterface* audio_track) {
-    RemoveTrack(&remote_audio_tracks_, stream, audio_track);
-  }
-
-  virtual void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream,
-                                        VideoTrackInterface* video_track) {
-    RemoveTrack(&remote_video_tracks_, stream, video_track);
-  }
-
-  virtual void OnRemoveLocalStream(MediaStreamInterface* stream) {
-  }
-
-  MediaStreamInterface* RemoteStream(const std::string& label) {
-    return remote_media_streams_->find(label);
-  }
-
-  StreamCollectionInterface* remote_streams() const {
-    return remote_media_streams_;
-  }
-
-  size_t NumberOfRemoteAudioTracks() { return remote_audio_tracks_.size(); }
-
-  void VerifyRemoteAudioTrack(const std::string& stream_label,
-                              const std::string& track_id,
-                              uint32_t ssrc) {
-    VerifyTrack(remote_audio_tracks_, stream_label, track_id, ssrc);
-  }
-
-  size_t NumberOfRemoteVideoTracks() { return remote_video_tracks_.size(); }
-
-  void VerifyRemoteVideoTrack(const std::string& stream_label,
-                              const std::string& track_id,
-                              uint32_t ssrc) {
-    VerifyTrack(remote_video_tracks_, stream_label, track_id, ssrc);
-  }
-
-  size_t NumberOfLocalAudioTracks() { return local_audio_tracks_.size(); }
-  void VerifyLocalAudioTrack(const std::string& stream_label,
-                             const std::string& track_id,
-                             uint32_t ssrc) {
-    VerifyTrack(local_audio_tracks_, stream_label, track_id, ssrc);
-  }
-
-  size_t NumberOfLocalVideoTracks() { return local_video_tracks_.size(); }
-
-  void VerifyLocalVideoTrack(const std::string& stream_label,
-                             const std::string& track_id,
-                             uint32_t ssrc) {
-    VerifyTrack(local_video_tracks_, stream_label, track_id, ssrc);
-  }
-
- private:
-  struct TrackInfo {
-    TrackInfo() {}
-    TrackInfo(const std::string& stream_label,
-              const std::string track_id,
-              uint32_t ssrc)
-        : stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
-    std::string stream_label;
-    std::string track_id;
-    uint32_t ssrc;
-  };
-  typedef std::vector<TrackInfo> TrackInfos;
-
-  void AddTrack(TrackInfos* track_infos,
-                MediaStreamInterface* stream,
-                MediaStreamTrackInterface* track,
-                uint32_t ssrc) {
-    (*track_infos).push_back(TrackInfo(stream->label(), track->id(), ssrc));
-  }
-
-  void RemoveTrack(TrackInfos* track_infos, MediaStreamInterface* stream,
-                   MediaStreamTrackInterface* track) {
-    for (TrackInfos::iterator it = track_infos->begin();
-         it != track_infos->end(); ++it) {
-      if (it->stream_label == stream->label() && it->track_id == track->id()) {
-        track_infos->erase(it);
-        return;
-      }
-    }
-    ADD_FAILURE();
-  }
-
-  const TrackInfo* FindTrackInfo(const TrackInfos& infos,
-                                 const std::string& stream_label,
-                                 const std::string track_id) const {
-    for (TrackInfos::const_iterator it = infos.begin();
-        it != infos.end(); ++it) {
-      if (it->stream_label == stream_label && it->track_id == track_id)
-        return &*it;
-    }
-    return NULL;
-  }
-
-  void VerifyTrack(const TrackInfos& track_infos,
-                   const std::string& stream_label,
-                   const std::string& track_id,
-                   uint32_t ssrc) {
-    const TrackInfo* track_info = FindTrackInfo(track_infos,
-                                                stream_label,
-                                                track_id);
-    ASSERT_TRUE(track_info != NULL);
-    EXPECT_EQ(ssrc, track_info->ssrc);
-  }
-
-  TrackInfos remote_audio_tracks_;
-  TrackInfos remote_video_tracks_;
-  TrackInfos local_audio_tracks_;
-  TrackInfos local_video_tracks_;
-
-  rtc::scoped_refptr<StreamCollection> remote_media_streams_;
-};
-
-class MediaStreamSignalingForTest : public webrtc::MediaStreamSignaling {
- public:
-  MediaStreamSignalingForTest(MockSignalingObserver* observer,
-                              cricket::ChannelManager* channel_manager)
-      : webrtc::MediaStreamSignaling(rtc::Thread::Current(), observer,
-                                     channel_manager) {
-  }
-
-  using webrtc::MediaStreamSignaling::GetOptionsForOffer;
-  using webrtc::MediaStreamSignaling::GetOptionsForAnswer;
-  using webrtc::MediaStreamSignaling::OnRemoteDescriptionChanged;
-  using webrtc::MediaStreamSignaling::remote_streams;
-};
-
-class MediaStreamSignalingTest: public testing::Test {
- protected:
-  virtual void SetUp() {
-    observer_.reset(new MockSignalingObserver());
-    channel_manager_.reset(
-        new cricket::ChannelManager(new cricket::FakeMediaEngine(),
-                                    rtc::Thread::Current()));
-    signaling_.reset(new MediaStreamSignalingForTest(observer_.get(),
-                                                     channel_manager_.get()));
-    data_channel_provider_.reset(new FakeDataChannelProvider());
-  }
-
-  // Create a collection of streams.
-  // CreateStreamCollection(1) creates a collection that corresponds to
-  // kSdpStringWithStream1.
-  // CreateStreamCollection(2) corresponds to kSdpStringWith2Stream.
-  rtc::scoped_refptr<StreamCollection>
-  CreateStreamCollection(int number_of_streams) {
-    rtc::scoped_refptr<StreamCollection> local_collection(
-        StreamCollection::Create());
-
-    for (int i = 0; i < number_of_streams; ++i) {
-      rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
-          webrtc::MediaStream::Create(kStreams[i]));
-
-      // Add a local audio track.
-      rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
-          webrtc::AudioTrack::Create(kAudioTracks[i], NULL));
-      stream->AddTrack(audio_track);
-
-      // Add a local video track.
-      rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
-          webrtc::VideoTrack::Create(kVideoTracks[i], NULL));
-      stream->AddTrack(video_track);
-
-      local_collection->AddStream(stream);
-    }
-    return local_collection;
-  }
-
-  // This function creates a MediaStream with label kStreams[0], containing
-  // |number_of_audio_tracks| audio tracks and |number_of_video_tracks| video
-  // tracks, and the corresponding SessionDescriptionInterface. The
-  // SessionDescriptionInterface is returned in |desc| and the MediaStream is
-  // stored in |reference_collection_|.
-  void CreateSessionDescriptionAndReference(
-      size_t number_of_audio_tracks,
-      size_t number_of_video_tracks,
-      SessionDescriptionInterface** desc) {
-    ASSERT_TRUE(desc != NULL);
-    ASSERT_LE(number_of_audio_tracks, 2u);
-    ASSERT_LE(number_of_video_tracks, 2u);
-
-    reference_collection_ = StreamCollection::Create();
-    std::string sdp_ms1 = std::string(kSdpStringInit);
-
-    std::string mediastream_label = kStreams[0];
-
-    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
-            webrtc::MediaStream::Create(mediastream_label));
-    reference_collection_->AddStream(stream);
-
-    if (number_of_audio_tracks > 0) {
-      sdp_ms1 += std::string(kSdpStringAudio);
-      sdp_ms1 += std::string(kSdpStringMs1Audio0);
-      AddAudioTrack(kAudioTracks[0], stream);
-    }
-    if (number_of_audio_tracks > 1) {
-      sdp_ms1 += kSdpStringMs1Audio1;
-      AddAudioTrack(kAudioTracks[1], stream);
-    }
-
-    if (number_of_video_tracks > 0) {
-      sdp_ms1 += std::string(kSdpStringVideo);
-      sdp_ms1 += std::string(kSdpStringMs1Video0);
-      AddVideoTrack(kVideoTracks[0], stream);
-    }
-    if (number_of_video_tracks > 1) {
-      sdp_ms1 += kSdpStringMs1Video1;
-      AddVideoTrack(kVideoTracks[1], stream);
-    }
-
-    *desc = webrtc::CreateSessionDescription(
-        SessionDescriptionInterface::kOffer, sdp_ms1, NULL);
-  }
-
-  void AddAudioTrack(const std::string& track_id,
-                     MediaStreamInterface* stream) {
-    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
-        webrtc::AudioTrack::Create(track_id, NULL));
-    ASSERT_TRUE(stream->AddTrack(audio_track));
-  }
-
-  void AddVideoTrack(const std::string& track_id,
-                     MediaStreamInterface* stream) {
-    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
-        webrtc::VideoTrack::Create(track_id, NULL));
-    ASSERT_TRUE(stream->AddTrack(video_track));
-  }
-
-  rtc::scoped_refptr<webrtc::DataChannel> AddDataChannel(
-      cricket::DataChannelType type, const std::string& label, int id) {
-    webrtc::InternalDataChannelInit config;
-    config.id = id;
-    rtc::scoped_refptr<webrtc::DataChannel> data_channel(
-        webrtc::DataChannel::Create(
-            data_channel_provider_.get(), type, label, config));
-    EXPECT_TRUE(data_channel.get() != NULL);
-    EXPECT_TRUE(signaling_->AddDataChannel(data_channel.get()));
-    return data_channel;
-  }
-
-  // ChannelManager is used by VideoSource, so it should be released after all
-  // the video tracks. Declaring it as the first member variable should ensure
-  // that.
-  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
-  rtc::scoped_refptr<StreamCollection> reference_collection_;
-  rtc::scoped_ptr<MockSignalingObserver> observer_;
-  rtc::scoped_ptr<MediaStreamSignalingForTest> signaling_;
-  rtc::scoped_ptr<FakeDataChannelProvider> data_channel_provider_;
-};
-
-TEST_F(MediaStreamSignalingTest, GetOptionsForOfferWithInvalidAudioOption) {
-  RTCOfferAnswerOptions rtc_options;
-  rtc_options.offer_to_receive_audio = RTCOfferAnswerOptions::kUndefined - 1;
-
-  cricket::MediaSessionOptions options;
-  EXPECT_FALSE(signaling_->GetOptionsForOffer(rtc_options, &options));
-
-  rtc_options.offer_to_receive_audio =
-      RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
-  EXPECT_FALSE(signaling_->GetOptionsForOffer(rtc_options, &options));
-}
-
-
-TEST_F(MediaStreamSignalingTest, GetOptionsForOfferWithInvalidVideoOption) {
-  RTCOfferAnswerOptions rtc_options;
-  rtc_options.offer_to_receive_video =
-      RTCOfferAnswerOptions::kUndefined - 1;
-
-  cricket::MediaSessionOptions options;
-  EXPECT_FALSE(signaling_->GetOptionsForOffer(rtc_options, &options));
-
-  rtc_options.offer_to_receive_video =
-      RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
-  EXPECT_FALSE(signaling_->GetOptionsForOffer(rtc_options, &options));
-}
-
-// Test that a MediaSessionOptions is created for an offer if
-// OfferToReceiveAudio and OfferToReceiveVideo options are set but no
-// MediaStreams are sent.
-TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudioVideo) {
-  RTCOfferAnswerOptions rtc_options;
-  rtc_options.offer_to_receive_audio = 1;
-  rtc_options.offer_to_receive_video = 1;
-
-  cricket::MediaSessionOptions options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options));
-  EXPECT_TRUE(options.has_audio());
-  EXPECT_TRUE(options.has_video());
-  EXPECT_TRUE(options.bundle_enabled);
-}
-
-// Test that a correct MediaSessionOptions is created for an offer if
-// OfferToReceiveAudio is set but no MediaStreams are sent.
-TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudio) {
-  RTCOfferAnswerOptions rtc_options;
-  rtc_options.offer_to_receive_audio = 1;
-
-  cricket::MediaSessionOptions options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options));
-  EXPECT_TRUE(options.has_audio());
-  EXPECT_FALSE(options.has_video());
-  EXPECT_TRUE(options.bundle_enabled);
-}
-
-// Test that a correct MediaSessionOptions is created for an offer if
-// the default RTCOfferAnswerOptions are used and no MediaStreams are sent.
-TEST_F(MediaStreamSignalingTest, GetDefaultMediaSessionOptionsForOffer) {
-  RTCOfferAnswerOptions rtc_options;
-
-  cricket::MediaSessionOptions options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options));
-  EXPECT_FALSE(options.has_audio());
-  EXPECT_FALSE(options.has_video());
-  EXPECT_FALSE(options.bundle_enabled);
-  EXPECT_TRUE(options.vad_enabled);
-  EXPECT_FALSE(options.transport_options.ice_restart);
-}
-
-// Test that a correct MediaSessionOptions is created for an offer if
-// OfferToReceiveVideo is set but no MediaStreams are sent.
-TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithVideo) {
-  RTCOfferAnswerOptions rtc_options;
-  rtc_options.offer_to_receive_audio = 0;
-  rtc_options.offer_to_receive_video = 1;
-
-  cricket::MediaSessionOptions options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options));
-  EXPECT_FALSE(options.has_audio());
-  EXPECT_TRUE(options.has_video());
-  EXPECT_TRUE(options.bundle_enabled);
-}
-
-// Test that a correct MediaSessionOptions is created for an offer if
-// UseRtpMux is set to false.
-TEST_F(MediaStreamSignalingTest,
-       GetMediaSessionOptionsForOfferWithBundleDisabled) {
-  RTCOfferAnswerOptions rtc_options;
-  rtc_options.offer_to_receive_audio = 1;
-  rtc_options.offer_to_receive_video = 1;
-  rtc_options.use_rtp_mux = false;
-
-  cricket::MediaSessionOptions options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options));
-  EXPECT_TRUE(options.has_audio());
-  EXPECT_TRUE(options.has_video());
-  EXPECT_FALSE(options.bundle_enabled);
-}
-
-// Test that a correct MediaSessionOptions is created to restart ice if
-// IceRestart is set. It also tests that subsequent MediaSessionOptions don't
-// have |transport_options.ice_restart| set.
-TEST_F(MediaStreamSignalingTest,
-       GetMediaSessionOptionsForOfferWithIceRestart) {
-  RTCOfferAnswerOptions rtc_options;
-  rtc_options.ice_restart = true;
-
-  cricket::MediaSessionOptions options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options));
-  EXPECT_TRUE(options.transport_options.ice_restart);
-
-  rtc_options = RTCOfferAnswerOptions();
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options));
-  EXPECT_FALSE(options.transport_options.ice_restart);
-}
-
-// Test that a correct MediaSessionOptions is created for an offer if
-// a MediaStream is sent and later updated with a new track.
-// MediaConstraints are not used.
-TEST_F(MediaStreamSignalingTest, AddTrackToLocalMediaStream) {
-  RTCOfferAnswerOptions rtc_options;
-  rtc::scoped_refptr<StreamCollection> local_streams(
-      CreateStreamCollection(1));
-  MediaStreamInterface* local_stream = local_streams->at(0);
-  EXPECT_TRUE(signaling_->AddLocalStream(local_stream));
-  cricket::MediaSessionOptions options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options));
-  VerifyMediaOptions(local_streams, options);
-
-  cricket::MediaSessionOptions updated_options;
-  local_stream->AddTrack(AudioTrack::Create(kAudioTracks[1], NULL));
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &updated_options));
-  VerifyMediaOptions(local_streams, updated_options);
-}
-
-// Test that the MediaConstraints used for an answer don't affect whether audio
-// and video are offered in an offer, but that if offer_to_receive_audio or
-// offer_to_receive_video is set in the offer options, the media type will be
-// included in subsequent answers.
-TEST_F(MediaStreamSignalingTest, MediaConstraintsInAnswer) {
-  FakeConstraints answer_c;
-  answer_c.SetMandatoryReceiveAudio(true);
-  answer_c.SetMandatoryReceiveVideo(true);
-
-  cricket::MediaSessionOptions answer_options;
-  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&answer_c, &answer_options));
-  EXPECT_TRUE(answer_options.has_audio());
-  EXPECT_TRUE(answer_options.has_video());
-
-  RTCOfferAnswerOptions rtc_offer_options;
-
-  cricket::MediaSessionOptions offer_options;
-  EXPECT_TRUE(
-      signaling_->GetOptionsForOffer(rtc_offer_options, &offer_options));
-  EXPECT_FALSE(offer_options.has_audio());
-  EXPECT_FALSE(offer_options.has_video());
-
-  RTCOfferAnswerOptions updated_rtc_offer_options;
-  updated_rtc_offer_options.offer_to_receive_audio = 1;
-  updated_rtc_offer_options.offer_to_receive_video = 1;
-
-  cricket::MediaSessionOptions updated_offer_options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(updated_rtc_offer_options,
-                                             &updated_offer_options));
-  EXPECT_TRUE(updated_offer_options.has_audio());
-  EXPECT_TRUE(updated_offer_options.has_video());
-
-  // Since an offer has been created with both audio and video, subsequent
-  // offers and answers should contain both audio and video.
-  // Answers will only contain the media types that exist in the offer
-  // regardless of the value of |updated_answer_options.has_audio| and
-  // |updated_answer_options.has_video|.
-  FakeConstraints updated_answer_c;
-  updated_answer_c.SetMandatoryReceiveAudio(false);
-  updated_answer_c.SetMandatoryReceiveVideo(false);
-
-  cricket::MediaSessionOptions updated_answer_options;
-  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&updated_answer_c,
-                                              &updated_answer_options));
-  EXPECT_TRUE(updated_answer_options.has_audio());
-  EXPECT_TRUE(updated_answer_options.has_video());
-
-  RTCOfferAnswerOptions default_rtc_options;
-  EXPECT_TRUE(signaling_->GetOptionsForOffer(default_rtc_options,
-                                             &updated_offer_options));
-  // By default, |has_audio| or |has_video| are false if there is no media
-  // track.
-  EXPECT_FALSE(updated_offer_options.has_audio());
-  EXPECT_FALSE(updated_offer_options.has_video());
-}
-
-// This test verifies that the remote MediaStreams corresponding to a received
-// SDP string are created. In this test, two separate MediaStreams are
-// signaled.
-TEST_F(MediaStreamSignalingTest, UpdateRemoteStreams) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithStream1, NULL));
-  EXPECT_TRUE(desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc.get());
-
-  rtc::scoped_refptr<StreamCollection> reference(
-      CreateStreamCollection(1));
-  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
-                                       reference.get()));
-  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
-                                       reference.get()));
-  EXPECT_EQ(1u, observer_->NumberOfRemoteAudioTracks());
-  observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1);
-  EXPECT_EQ(1u, observer_->NumberOfRemoteVideoTracks());
-  observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2);
-  ASSERT_EQ(1u, observer_->remote_streams()->count());
-  MediaStreamInterface* remote_stream =  observer_->remote_streams()->at(0);
-  EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != NULL);
-
-  // Create a session description based on another SDP with another
-  // MediaStream.
-  rtc::scoped_ptr<SessionDescriptionInterface> update_desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWith2Stream, NULL));
-  EXPECT_TRUE(update_desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(update_desc.get());
-
-  rtc::scoped_refptr<StreamCollection> reference2(
-      CreateStreamCollection(2));
-  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
-                                       reference2.get()));
-  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
-                                       reference2.get()));
-
-  EXPECT_EQ(2u, observer_->NumberOfRemoteAudioTracks());
-  observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1);
-  observer_->VerifyRemoteAudioTrack(kStreams[1], kAudioTracks[1], 3);
-  EXPECT_EQ(2u, observer_->NumberOfRemoteVideoTracks());
-  observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2);
-  observer_->VerifyRemoteVideoTrack(kStreams[1], kVideoTracks[1], 4);
-}
-
-// This test verifies that the remote MediaStreams corresponding to a received
-// SDP string are created. In this test the same remote MediaStream is signaled
-// but MediaStream tracks are added and removed.
-TEST_F(MediaStreamSignalingTest, AddRemoveTrackFromExistingRemoteMediaStream) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1;
-  CreateSessionDescriptionAndReference(1, 1, desc_ms1.use());
-  signaling_->OnRemoteDescriptionChanged(desc_ms1.get());
-  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
-                                       reference_collection_));
-
-  // Add extra audio and video tracks to the same MediaStream.
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks;
-  CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.use());
-  signaling_->OnRemoteDescriptionChanged(desc_ms1_two_tracks.get());
-  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
-                                       reference_collection_));
-  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
-                                       reference_collection_));
-
-  // Remove the extra audio and video tracks again.
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms2;
-  CreateSessionDescriptionAndReference(1, 1, desc_ms2.use());
-  signaling_->OnRemoteDescriptionChanged(desc_ms2.get());
-  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
-                                       reference_collection_));
-  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
-                                       reference_collection_));
-}
-
-// This test verifies that remote tracks are ended if a local session
-// description that rejects the media content type is set.
-TEST_F(MediaStreamSignalingTest, RejectMediaContent) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithStream1, NULL));
-  EXPECT_TRUE(desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc.get());
-
-  ASSERT_EQ(1u, observer_->remote_streams()->count());
-  MediaStreamInterface* remote_stream =  observer_->remote_streams()->at(0);
-  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
-  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
-
-  rtc::scoped_refptr<webrtc::VideoTrackInterface> remote_video =
-      remote_stream->GetVideoTracks()[0];
-  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state());
-  rtc::scoped_refptr<webrtc::AudioTrackInterface> remote_audio =
-      remote_stream->GetAudioTracks()[0];
-  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
-
-  cricket::ContentInfo* video_info =
-      desc->description()->GetContentByName("video");
-  ASSERT_TRUE(video_info != NULL);
-  video_info->rejected = true;
-  signaling_->OnLocalDescriptionChanged(desc.get());
-  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
-  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
-
-  cricket::ContentInfo* audio_info =
-      desc->description()->GetContentByName("audio");
-  ASSERT_TRUE(audio_info != NULL);
-  audio_info->rejected = true;
-  signaling_->OnLocalDescriptionChanged(desc.get());
-  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state());
-}
-
-// This test verifies that it won't crash if the remote track has been removed
-// outside of MediaStreamSignaling and MediaStreamSignaling then tries to
-// reject this track.
-TEST_F(MediaStreamSignalingTest, RemoveTrackThenRejectMediaContent) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithStream1, NULL));
-  EXPECT_TRUE(desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc.get());
-
-  MediaStreamInterface* remote_stream =  observer_->remote_streams()->at(0);
-  remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
-  remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
-
-  cricket::ContentInfo* video_info =
-      desc->description()->GetContentByName("video");
-  video_info->rejected = true;
-  signaling_->OnLocalDescriptionChanged(desc.get());
-
-  cricket::ContentInfo* audio_info =
-      desc->description()->GetContentByName("audio");
-  audio_info->rejected = true;
-  signaling_->OnLocalDescriptionChanged(desc.get());
-
-  // No crash is a pass.
-}
-
-// This tests that a default MediaStream is created if a remote session
-// description doesn't contain any streams and doesn't support MSID.
-// It also tests that the default stream is updated if a video m-line is added
-// in a subsequent session description.
-TEST_F(MediaStreamSignalingTest, SdpWithoutMsidCreatesDefaultStream) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_audio_only(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithoutStreamsAudioOnly,
-                                       NULL));
-  ASSERT_TRUE(desc_audio_only != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc_audio_only.get());
-
-  EXPECT_EQ(1u, signaling_->remote_streams()->count());
-  ASSERT_EQ(1u, observer_->remote_streams()->count());
-  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
-
-  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
-  EXPECT_EQ(0u, remote_stream->GetVideoTracks().size());
-  EXPECT_EQ("default", remote_stream->label());
-
-  rtc::scoped_ptr<SessionDescriptionInterface> desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithoutStreams, NULL));
-  ASSERT_TRUE(desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc.get());
-  EXPECT_EQ(1u, signaling_->remote_streams()->count());
-  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
-  EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id());
-  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
-  EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id());
-  observer_->VerifyRemoteAudioTrack("default", "defaulta0", 0);
-  observer_->VerifyRemoteVideoTrack("default", "defaultv0", 0);
-}
-
-// This tests that a default MediaStream is created if a remote session
-// description doesn't contain any streams and media direction is send only.
-TEST_F(MediaStreamSignalingTest, RecvOnlySdpWithoutMsidCreatesDefaultStream) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringSendOnlyWithWithoutStreams,
-                                       NULL));
-  ASSERT_TRUE(desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc.get());
-
-  EXPECT_EQ(1u, signaling_->remote_streams()->count());
-  ASSERT_EQ(1u, observer_->remote_streams()->count());
-  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
-
-  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
-  EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
-  EXPECT_EQ("default", remote_stream->label());
-}
-
-// This tests that it won't crash when MediaStreamSignaling tries to remove
-// a remote track that has already been removed from the MediaStream.
-TEST_F(MediaStreamSignalingTest, RemoveAlreadyGoneRemoteStream) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_audio_only(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithoutStreams,
-                                       NULL));
-  ASSERT_TRUE(desc_audio_only != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc_audio_only.get());
-  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
-  remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
-  remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
-
-  rtc::scoped_ptr<SessionDescriptionInterface> desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithoutStreams, NULL));
-  ASSERT_TRUE(desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc.get());
-
-  // No crash is a pass.
-}
-
-// This tests that a default MediaStream is created if the remote session
-// description doesn't contain any streams and doesn't indicate whether MSID
-// is supported.
-TEST_F(MediaStreamSignalingTest,
-       SdpWithoutMsidAndStreamsCreatesDefaultStream) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithoutStreams,
-                                       NULL));
-  ASSERT_TRUE(desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc.get());
-
-  ASSERT_EQ(1u, observer_->remote_streams()->count());
-  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
-  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
-  EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
-}
-
-// This tests that a default MediaStream is not created if the remote session
-// description doesn't contain any streams but does support MSID.
-TEST_F(MediaStreamSignalingTest, SdpWithMsidDoesNotCreateDefaultStream) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_msid_without_streams(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithMsidWithoutStreams,
-                                       NULL));
-  signaling_->OnRemoteDescriptionChanged(desc_msid_without_streams.get());
-  EXPECT_EQ(0u, observer_->remote_streams()->count());
-}
-
-// This test verifies that a default MediaStream is not created if a remote
-// session description is updated to no longer have any MediaStreams.
-TEST_F(MediaStreamSignalingTest, VerifyDefaultStreamIsNotCreated) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithStream1,
-                                       NULL));
-  ASSERT_TRUE(desc != NULL);
-  signaling_->OnRemoteDescriptionChanged(desc.get());
-  rtc::scoped_refptr<StreamCollection> reference(
-      CreateStreamCollection(1));
-  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
-                                       reference.get()));
-
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_without_streams(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       kSdpStringWithoutStreams,
-                                       NULL));
-  signaling_->OnRemoteDescriptionChanged(desc_without_streams.get());
-  EXPECT_EQ(0u, observer_->remote_streams()->count());
-}
-
-// This test verifies that the correct MediaStreamSignalingObserver methods are
-// called when MediaStreamSignaling::OnLocalDescriptionChanged is called with
-// an updated local session description.
-TEST_F(MediaStreamSignalingTest, LocalDescriptionChanged) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_1;
-  CreateSessionDescriptionAndReference(2, 2, desc_1.use());
-
-  signaling_->AddLocalStream(reference_collection_->at(0));
-  signaling_->OnLocalDescriptionChanged(desc_1.get());
-  EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks());
-  EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks());
-  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
-  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
-  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3);
-  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4);
-
-  // Remove an audio and video track.
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_2;
-  CreateSessionDescriptionAndReference(1, 1, desc_2.use());
-  signaling_->OnLocalDescriptionChanged(desc_2.get());
-  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
-  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
-  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
-  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
-}
-
-// This test verifies that the correct MediaStreamSignalingObserver methods are
-// called when MediaStreamSignaling::AddLocalStream is called after
-// MediaStreamSignaling::OnLocalDescriptionChanged is called.
-TEST_F(MediaStreamSignalingTest, AddLocalStreamAfterLocalDescriptionChanged) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc_1;
-  CreateSessionDescriptionAndReference(2, 2, desc_1.use());
-
-  signaling_->OnLocalDescriptionChanged(desc_1.get());
-  EXPECT_EQ(0u, observer_->NumberOfLocalAudioTracks());
-  EXPECT_EQ(0u, observer_->NumberOfLocalVideoTracks());
-
-  signaling_->AddLocalStream(reference_collection_->at(0));
-  EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks());
-  EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks());
-  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
-  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
-  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3);
-  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4);
-}
-
-// This test verifies that the correct MediaStreamSignalingObserver methods are
-// called if the SSRC on a local track is changed when
-// MediaStreamSignaling::OnLocalDescriptionChanged is called.
-TEST_F(MediaStreamSignalingTest, ChangeSsrcOnTrackInLocalSessionDescription) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc;
-  CreateSessionDescriptionAndReference(1, 1, desc.use());
-
-  signaling_->AddLocalStream(reference_collection_->at(0));
-  signaling_->OnLocalDescriptionChanged(desc.get());
-  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
-  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
-  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
-  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
-
-  // Change the ssrc of the audio and video track.
-  std::string sdp;
-  desc->ToString(&sdp);
-  std::string ssrc_org = "a=ssrc:1";
-  std::string ssrc_to = "a=ssrc:97";
-  rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(),
-                             ssrc_to.c_str(), ssrc_to.length(),
-                             &sdp);
-  ssrc_org = "a=ssrc:2";
-  ssrc_to = "a=ssrc:98";
-  rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(),
-                             ssrc_to.c_str(), ssrc_to.length(),
-                             &sdp);
-  rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       sdp, NULL));
-
-  signaling_->OnLocalDescriptionChanged(updated_desc.get());
-  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
-  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
-  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 97);
-  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 98);
-}
-
-// This test that the correct MediaStreamSignalingObserver methods are called
-// if a new session description is set with the same tracks but they are now
-// sent on a another MediaStream.
-TEST_F(MediaStreamSignalingTest, SignalSameTracksInSeparateMediaStream) {
-  rtc::scoped_ptr<SessionDescriptionInterface> desc;
-  CreateSessionDescriptionAndReference(1, 1, desc.use());
-
-  signaling_->AddLocalStream(reference_collection_->at(0));
-  signaling_->OnLocalDescriptionChanged(desc.get());
-  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
-  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
-
-  std::string stream_label_0 = kStreams[0];
-  observer_->VerifyLocalAudioTrack(stream_label_0, kAudioTracks[0], 1);
-  observer_->VerifyLocalVideoTrack(stream_label_0, kVideoTracks[0], 2);
-
-  // Add a new MediaStream but with the same tracks as in the first stream.
-  std::string stream_label_1 = kStreams[1];
-  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream_1(
-      webrtc::MediaStream::Create(kStreams[1]));
-  stream_1->AddTrack(reference_collection_->at(0)->GetVideoTracks()[0]);
-  stream_1->AddTrack(reference_collection_->at(0)->GetAudioTracks()[0]);
-  signaling_->AddLocalStream(stream_1);
-
-  // Replace msid in the original SDP.
-  std::string sdp;
-  desc->ToString(&sdp);
-  rtc::replace_substrs(
-      kStreams[0], strlen(kStreams[0]), kStreams[1], strlen(kStreams[1]), &sdp);
-
-  rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
-      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
-                                       sdp, NULL));
-
-  signaling_->OnLocalDescriptionChanged(updated_desc.get());
-  observer_->VerifyLocalAudioTrack(kStreams[1], kAudioTracks[0], 1);
-  observer_->VerifyLocalVideoTrack(kStreams[1], kVideoTracks[0], 2);
-  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
-  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
-}
-
-// Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for
-// SSL_SERVER.
-TEST_F(MediaStreamSignalingTest, SctpIdAllocationBasedOnRole) {
-  int id;
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, &id));
-  EXPECT_EQ(1, id);
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, &id));
-  EXPECT_EQ(0, id);
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, &id));
-  EXPECT_EQ(3, id);
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, &id));
-  EXPECT_EQ(2, id);
-}
-
-// Verifies that SCTP ids of existing DataChannels are not reused.
-TEST_F(MediaStreamSignalingTest, SctpIdAllocationNoReuse) {
-  int old_id = 1;
-  AddDataChannel(cricket::DCT_SCTP, "a", old_id);
-
-  int new_id;
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, &new_id));
-  EXPECT_NE(old_id, new_id);
-
-  // Creates a DataChannel with id 0.
-  old_id = 0;
-  AddDataChannel(cricket::DCT_SCTP, "a", old_id);
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, &new_id));
-  EXPECT_NE(old_id, new_id);
-}
-
-// Verifies that SCTP ids of removed DataChannels can be reused.
-TEST_F(MediaStreamSignalingTest, SctpIdReusedForRemovedDataChannel) {
-  int odd_id = 1;
-  int even_id = 0;
-  AddDataChannel(cricket::DCT_SCTP, "a", odd_id);
-  AddDataChannel(cricket::DCT_SCTP, "a", even_id);
-
-  int allocated_id = -1;
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER,
-                                          &allocated_id));
-  EXPECT_EQ(odd_id + 2, allocated_id);
-  AddDataChannel(cricket::DCT_SCTP, "a", allocated_id);
-
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT,
-                                          &allocated_id));
-  EXPECT_EQ(even_id + 2, allocated_id);
-  AddDataChannel(cricket::DCT_SCTP, "a", allocated_id);
-
-  signaling_->RemoveSctpDataChannel(odd_id);
-  signaling_->RemoveSctpDataChannel(even_id);
-
-  // Verifies that removed DataChannel ids are reused.
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER,
-                                          &allocated_id));
-  EXPECT_EQ(odd_id, allocated_id);
-
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT,
-                                          &allocated_id));
-  EXPECT_EQ(even_id, allocated_id);
-
-  // Verifies that used higher DataChannel ids are not reused.
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER,
-                                          &allocated_id));
-  EXPECT_NE(odd_id + 2, allocated_id);
-
-  ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT,
-                                          &allocated_id));
-  EXPECT_NE(even_id + 2, allocated_id);
-
-}
-
-// Verifies that duplicated label is not allowed for RTP data channel.
-TEST_F(MediaStreamSignalingTest, RtpDuplicatedLabelNotAllowed) {
-  AddDataChannel(cricket::DCT_RTP, "a", -1);
-
-  webrtc::InternalDataChannelInit config;
-  rtc::scoped_refptr<webrtc::DataChannel> data_channel =
-      webrtc::DataChannel::Create(
-          data_channel_provider_.get(), cricket::DCT_RTP, "a", config);
-  ASSERT_TRUE(data_channel.get() != NULL);
-  EXPECT_FALSE(signaling_->AddDataChannel(data_channel.get()));
-}
-
-// Verifies that duplicated label is allowed for SCTP data channel.
-TEST_F(MediaStreamSignalingTest, SctpDuplicatedLabelAllowed) {
-  AddDataChannel(cricket::DCT_SCTP, "a", -1);
-  AddDataChannel(cricket::DCT_SCTP, "a", -1);
-}
-
-// Verifies the correct configuration is used to create DataChannel from an OPEN
-// message.
-TEST_F(MediaStreamSignalingTest, CreateDataChannelFromOpenMessage) {
-  FakeDataChannelFactory fake_factory(data_channel_provider_.get(),
-                                      cricket::DCT_SCTP,
-                                      signaling_.get());
-  signaling_->SetDataChannelFactory(&fake_factory);
-  webrtc::DataChannelInit config;
-  config.id = 1;
-  rtc::Buffer payload;
-  webrtc::WriteDataChannelOpenMessage("a", config, &payload);
-  cricket::ReceiveDataParams params;
-  params.ssrc = config.id;
-  EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage(params, payload));
-  EXPECT_EQ(config.id, fake_factory.last_init().id);
-  EXPECT_FALSE(fake_factory.last_init().negotiated);
-  EXPECT_EQ(webrtc::InternalDataChannelInit::kAcker,
-            fake_factory.last_init().open_handshake_role);
-}
-
-// Verifies that duplicated label from OPEN message is allowed.
-TEST_F(MediaStreamSignalingTest, DuplicatedLabelFromOpenMessageAllowed) {
-  AddDataChannel(cricket::DCT_SCTP, "a", -1);
-
-  FakeDataChannelFactory fake_factory(data_channel_provider_.get(),
-                                      cricket::DCT_SCTP,
-                                      signaling_.get());
-  signaling_->SetDataChannelFactory(&fake_factory);
-  webrtc::DataChannelInit config;
-  config.id = 0;
-  rtc::Buffer payload;
-  webrtc::WriteDataChannelOpenMessage("a", config, &payload);
-  cricket::ReceiveDataParams params;
-  params.ssrc = config.id;
-  EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage(params, payload));
-}
-
-// Verifies that a DataChannel closed remotely is closed locally.
-TEST_F(MediaStreamSignalingTest,
-       SctpDataChannelClosedLocallyWhenClosedRemotely) {
-  webrtc::InternalDataChannelInit config;
-  config.id = 0;
-
-  rtc::scoped_refptr<webrtc::DataChannel> data_channel =
-      webrtc::DataChannel::Create(
-          data_channel_provider_.get(), cricket::DCT_SCTP, "a", config);
-  ASSERT_TRUE(data_channel.get() != NULL);
-  EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
-            data_channel->state());
-
-  EXPECT_TRUE(signaling_->AddDataChannel(data_channel.get()));
-
-  signaling_->OnRemoteSctpDataChannelClosed(config.id);
-  EXPECT_EQ(webrtc::DataChannelInterface::kClosed, data_channel->state());
-}
-
-// Verifies that DataChannel added from OPEN message is added to
-// MediaStreamSignaling only once (webrtc issue 3778).
-TEST_F(MediaStreamSignalingTest, DataChannelFromOpenMessageAddedOnce) {
-  FakeDataChannelFactory fake_factory(data_channel_provider_.get(),
-                                      cricket::DCT_SCTP,
-                                      signaling_.get());
-  signaling_->SetDataChannelFactory(&fake_factory);
-  webrtc::DataChannelInit config;
-  config.id = 1;
-  rtc::Buffer payload;
-  webrtc::WriteDataChannelOpenMessage("a", config, &payload);
-  cricket::ReceiveDataParams params;
-  params.ssrc = config.id;
-  EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage(params, payload));
-  EXPECT_TRUE(signaling_->HasDataChannels());
-
-  // Removes the DataChannel and verifies that no DataChannel is left.
-  signaling_->RemoveSctpDataChannel(config.id);
-  EXPECT_FALSE(signaling_->HasDataChannels());
-}
diff --git a/talk/app/webrtc/peerconnection.cc b/talk/app/webrtc/peerconnection.cc
index 86902b0..44a231e 100644
--- a/talk/app/webrtc/peerconnection.cc
+++ b/talk/app/webrtc/peerconnection.cc
@@ -30,22 +30,36 @@
 #include <vector>
 #include <cctype>  // for isdigit
 
+#include "talk/app/webrtc/audiotrack.h"
 #include "talk/app/webrtc/dtmfsender.h"
 #include "talk/app/webrtc/jsepicecandidate.h"
 #include "talk/app/webrtc/jsepsessiondescription.h"
 #include "talk/app/webrtc/mediaconstraintsinterface.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediastreamproxy.h"
+#include "talk/app/webrtc/mediastreamtrackproxy.h"
+#include "talk/app/webrtc/remoteaudiosource.h"
+#include "talk/app/webrtc/remotevideocapturer.h"
 #include "talk/app/webrtc/rtpreceiver.h"
 #include "talk/app/webrtc/rtpsender.h"
 #include "talk/app/webrtc/streamcollection.h"
+#include "talk/app/webrtc/videosource.h"
+#include "talk/app/webrtc/videotrack.h"
+#include "talk/media/sctp/sctpdataengine.h"
 #include "webrtc/p2p/client/basicportallocator.h"
 #include "talk/session/media/channelmanager.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/stringencode.h"
+#include "webrtc/base/stringutils.h"
 #include "webrtc/system_wrappers/interface/field_trial.h"
 
 namespace {
 
+using webrtc::DataChannel;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaStreamInterface;
 using webrtc::PeerConnectionInterface;
+using webrtc::StreamCollection;
 using webrtc::StunConfigurations;
 using webrtc::TurnConfigurations;
 typedef webrtc::PortAllocatorFactoryInterface::StunConfiguration
@@ -53,6 +67,10 @@
 typedef webrtc::PortAllocatorFactoryInterface::TurnConfiguration
     TurnConfiguration;
 
+static const char kDefaultStreamLabel[] = "default";
+static const char kDefaultAudioTrackLabel[] = "defaulta0";
+static const char kDefaultVideoTrackLabel[] = "defaultv0";
+
 // The min number of tokens that must be present in a Turn host uri.
 // e.g. user@turn.example.org
 static const size_t kTurnHostTokensNum = 2;
@@ -84,6 +102,7 @@
 enum {
   MSG_SET_SESSIONDESCRIPTION_SUCCESS = 0,
   MSG_SET_SESSIONDESCRIPTION_FAILED,
+  MSG_CREATE_SESSIONDESCRIPTION_FAILED,
   MSG_GETSTATS,
 };
 
@@ -97,6 +116,15 @@
   std::string error;
 };
 
+struct CreateSessionDescriptionMsg : public rtc::MessageData {
+  explicit CreateSessionDescriptionMsg(
+      webrtc::CreateSessionDescriptionObserver* observer)
+      : observer(observer) {}
+
+  rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserver> observer;
+  std::string error;
+};
+
 struct GetStatsMsg : public rtc::MessageData {
   GetStatsMsg(webrtc::StatsObserver* observer,
               webrtc::MediaStreamTrackInterface* track)
@@ -302,10 +330,210 @@
   return true;
 }
 
+// Check if we can send |new_stream| on a PeerConnection.
+bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams,
+                            webrtc::MediaStreamInterface* new_stream) {
+  if (!new_stream || !current_streams) {
+    return false;
+  }
+  if (current_streams->find(new_stream->label()) != nullptr) {
+    LOG(LS_ERROR) << "MediaStream with label " << new_stream->label()
+                  << " is already added.";
+    return false;
+  }
+  return true;
+}
+
+bool MediaContentDirectionHasSend(cricket::MediaContentDirection dir) {
+  return dir == cricket::MD_SENDONLY || dir == cricket::MD_SENDRECV;
+}
+
+bool IsValidOfferToReceiveMedia(int value) {
+  typedef PeerConnectionInterface::RTCOfferAnswerOptions Options;
+  return (value >= Options::kUndefined) &&
+         (value <= Options::kMaxOfferToReceiveMedia);
+}
+
+// Add the stream and RTP data channel info to |session_options|.
+void SetStreams(cricket::MediaSessionOptions* session_options,
+                rtc::scoped_refptr<StreamCollection> streams,
+                const std::map<std::string, rtc::scoped_refptr<DataChannel>>&
+                    rtp_data_channels) {
+  session_options->streams.clear();
+  if (streams != nullptr) {
+    for (size_t i = 0; i < streams->count(); ++i) {
+      MediaStreamInterface* stream = streams->at(i);
+      // For each audio track in the stream, add it to the MediaSessionOptions.
+      for (const auto& track : stream->GetAudioTracks()) {
+        session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
+                                       stream->label());
+      }
+      // For each video track in the stream, add it to the MediaSessionOptions.
+      for (const auto& track : stream->GetVideoTracks()) {
+        session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
+                                       stream->label());
+      }
+    }
+  }
+
+  // Check for data channels.
+  for (const auto& kv : rtp_data_channels) {
+    const DataChannel* channel = kv.second;
+    if (channel->state() == DataChannel::kConnecting ||
+        channel->state() == DataChannel::kOpen) {
+      // |streamid| and |sync_label| are both set to the DataChannel label
+      // here so they can be signaled the same way as MediaStreams and Tracks.
+      // For MediaStreams, the sync_label is the MediaStream label and the
+      // track label is the same as |streamid|.
+      const std::string& streamid = channel->label();
+      const std::string& sync_label = channel->label();
+      session_options->AddSendStream(cricket::MEDIA_TYPE_DATA, streamid,
+                                     sync_label);
+    }
+  }
+}
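+
+// Usage sketch (values are hypothetical, not exercised by this change): with
+// one local stream holding a single audio and a single video track, plus one
+// open RTP data channel labeled "chat", SetStreams() leaves three send
+// streams in |session_options|:
+//
+//   cricket::MediaSessionOptions options;
+//   SetStreams(&options, local_streams, rtp_data_channels);
+//   // options.streams now holds:
+//   //   {MEDIA_TYPE_AUDIO, <audio track id>, <stream label>}
+//   //   {MEDIA_TYPE_VIDEO, <video track id>, <stream label>}
+//   //   {MEDIA_TYPE_DATA,  "chat",           "chat"}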
+
 }  // namespace
 
 namespace webrtc {
 
+// Factory class for creating remote MediaStreams and MediaStreamTracks.
+class RemoteMediaStreamFactory {
+ public:
+  explicit RemoteMediaStreamFactory(rtc::Thread* signaling_thread,
+                                    cricket::ChannelManager* channel_manager)
+      : signaling_thread_(signaling_thread),
+        channel_manager_(channel_manager) {}
+
+  rtc::scoped_refptr<MediaStreamInterface> CreateMediaStream(
+      const std::string& stream_label) {
+    return MediaStreamProxy::Create(signaling_thread_,
+                                    MediaStream::Create(stream_label));
+  }
+
+  AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
+                                     const std::string& track_id) {
+    return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(
+        stream, track_id, RemoteAudioSource::Create().get());
+  }
+
+  VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
+                                     const std::string& track_id) {
+    return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(
+        stream, track_id,
+        VideoSource::Create(channel_manager_, new RemoteVideoCapturer(),
+                            nullptr)
+            .get());
+  }
+
+ private:
+  template <typename TI, typename T, typename TP, typename S>
+  TI* AddTrack(MediaStreamInterface* stream,
+               const std::string& track_id,
+               S* source) {
+    rtc::scoped_refptr<TI> track(
+        TP::Create(signaling_thread_, T::Create(track_id, source)));
+    track->set_state(webrtc::MediaStreamTrackInterface::kLive);
+    if (stream->AddTrack(track)) {
+      return track;
+    }
+    return nullptr;
+  }
+
+  rtc::Thread* signaling_thread_;
+  cricket::ChannelManager* channel_manager_;
+};
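+
+// Usage sketch (labels are hypothetical); this mirrors how the factory is
+// used by OnRemoteTrackSeen and MaybeCreateDefaultStream below:
+//
+//   rtc::scoped_refptr<MediaStreamInterface> stream =
+//       remote_stream_factory_->CreateMediaStream("remote_stream");
+//   AudioTrackInterface* audio =
+//       remote_stream_factory_->AddAudioTrack(stream, "audio_track_0");
+//   VideoTrackInterface* video =
+//       remote_stream_factory_->AddVideoTrack(stream, "video_track_0");
+//   // Both tracks are created in the kLive state and attached to |stream|.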
+
+bool ConvertRtcOptionsForOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+    cricket::MediaSessionOptions* session_options) {
+  typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
+  if (!IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) ||
+      !IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video)) {
+    return false;
+  }
+
+  // According to the spec, offer to receive audio/video if the constraint is
+  // not set and there are send streams.
+  if (rtc_options.offer_to_receive_audio == RTCOfferAnswerOptions::kUndefined) {
+    session_options->recv_audio =
+        session_options->HasSendMediaStream(cricket::MEDIA_TYPE_AUDIO);
+  } else {
+    session_options->recv_audio = (rtc_options.offer_to_receive_audio > 0);
+  }
+  if (rtc_options.offer_to_receive_video == RTCOfferAnswerOptions::kUndefined) {
+    session_options->recv_video =
+        session_options->HasSendMediaStream(cricket::MEDIA_TYPE_VIDEO);
+  } else {
+    session_options->recv_video = (rtc_options.offer_to_receive_video > 0);
+  }
+
+  session_options->vad_enabled = rtc_options.voice_activity_detection;
+  session_options->transport_options.ice_restart = rtc_options.ice_restart;
+  session_options->bundle_enabled =
+      rtc_options.use_rtp_mux &&
+      (session_options->has_audio() || session_options->has_video() ||
+       session_options->has_data());
+
+  return true;
+}
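+
+// Example mapping (option values are hypothetical): explicitly request
+// incoming audio but decline incoming video.
+//
+//   PeerConnectionInterface::RTCOfferAnswerOptions rtc_options;
+//   rtc_options.offer_to_receive_audio = 1;
+//   rtc_options.offer_to_receive_video = 0;
+//   cricket::MediaSessionOptions session_options;
+//   if (ConvertRtcOptionsForOffer(rtc_options, &session_options)) {
+//     // session_options.recv_audio == true, session_options.recv_video ==
+//     // false; bundle_enabled follows use_rtp_mux and whether any media or
+//     // data section is present.
+//   }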
+
+bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
+                               cricket::MediaSessionOptions* session_options) {
+  bool value = false;
+  size_t mandatory_constraints_satisfied = 0;
+
+  // kOfferToReceiveAudio defaults to true according to spec.
+  if (!FindConstraint(constraints,
+                      MediaConstraintsInterface::kOfferToReceiveAudio, &value,
+                      &mandatory_constraints_satisfied) ||
+      value) {
+    session_options->recv_audio = true;
+  }
+
+  // kOfferToReceiveVideo defaults to false according to spec. But if it is an
+  // answer and video is offered, we should still accept video by default.
+  value = false;
+  if (!FindConstraint(constraints,
+                      MediaConstraintsInterface::kOfferToReceiveVideo, &value,
+                      &mandatory_constraints_satisfied) ||
+      value) {
+    session_options->recv_video = true;
+  }
+
+  if (FindConstraint(constraints,
+                     MediaConstraintsInterface::kVoiceActivityDetection, &value,
+                     &mandatory_constraints_satisfied)) {
+    session_options->vad_enabled = value;
+  }
+
+  if (FindConstraint(constraints, MediaConstraintsInterface::kUseRtpMux, &value,
+                     &mandatory_constraints_satisfied)) {
+    session_options->bundle_enabled = value;
+  } else {
+    // kUseRtpMux defaults to true according to spec.
+    session_options->bundle_enabled = true;
+  }
+  session_options->bundle_enabled =
+      session_options->bundle_enabled &&
+      (session_options->has_audio() || session_options->has_video() ||
+       session_options->has_data());
+
+  if (FindConstraint(constraints, MediaConstraintsInterface::kIceRestart,
+                     &value, &mandatory_constraints_satisfied)) {
+    session_options->transport_options.ice_restart = value;
+  } else {
+    // kIceRestart defaults to false according to spec.
+    session_options->transport_options.ice_restart = false;
+  }
+
+  if (!constraints) {
+    return true;
+  }
+  return mandatory_constraints_satisfied == constraints->GetMandatory().size();
+}
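+
+// Default behavior sketch, derived from the code above: with no constraints
+// an answer offers to receive both audio and video, enables BUNDLE whenever
+// any media or data section is present, and does not request an ICE restart.
+//
+//   cricket::MediaSessionOptions session_options;
+//   bool ok = ParseConstraintsForAnswer(nullptr, &session_options);
+//   // ok == true; session_options.recv_audio and recv_video are true and
+//   // session_options.transport_options.ice_restart is false.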
+
 bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
                      StunConfigurations* stun_config,
                      TurnConfigurations* turn_config) {
@@ -333,22 +561,6 @@
   return true;
 }
 
-// Check if we can send |new_stream| on a PeerConnection.
-// Currently only one audio but multiple video track is supported per
-// PeerConnection.
-bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams,
-                            webrtc::MediaStreamInterface* new_stream) {
-  if (!new_stream || !current_streams)
-    return false;
-  if (current_streams->find(new_stream->label()) != NULL) {
-    LOG(LS_ERROR) << "MediaStream with label " << new_stream->label()
-                  << " is already added.";
-    return false;
-  }
-
-  return true;
-}
-
 PeerConnection::PeerConnection(PeerConnectionFactory* factory)
     : factory_(factory),
       observer_(NULL),
@@ -356,14 +568,12 @@
       signaling_state_(kStable),
       ice_state_(kIceNew),
       ice_connection_state_(kIceConnectionNew),
-      ice_gathering_state_(kIceGatheringNew) {
-}
+      ice_gathering_state_(kIceGatheringNew),
+      local_streams_(StreamCollection::Create()),
+      remote_streams_(StreamCollection::Create()) {}
 
 PeerConnection::~PeerConnection() {
   RTC_DCHECK(signaling_thread()->IsCurrent());
-  if (mediastream_signaling_) {
-    mediastream_signaling_->TearDown();
-  }
   // Need to detach RTP senders/receivers from WebRtcSession,
   // since it's about to be destroyed.
   for (const auto& sender : senders_) {
@@ -380,9 +590,10 @@
     PortAllocatorFactoryInterface* allocator_factory,
     rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
     PeerConnectionObserver* observer) {
-  RTC_DCHECK(observer != NULL);
-  if (!observer)
+  RTC_DCHECK(observer != nullptr);
+  if (!observer) {
     return false;
+  }
   observer_ = observer;
 
   std::vector<PortAllocatorFactoryInterface::StunConfiguration> stun_config;
@@ -400,8 +611,8 @@
                          cricket::PORTALLOCATOR_ENABLE_IPV6;
   bool value;
   // If IPv6 flag was specified, we'll not override it by experiment.
-  if (FindConstraint(
-          constraints, MediaConstraintsInterface::kEnableIPv6, &value, NULL)) {
+  if (FindConstraint(constraints, MediaConstraintsInterface::kEnableIPv6,
+                     &value, nullptr)) {
     if (!value) {
       portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
     }
@@ -419,36 +630,45 @@
   // No step delay is used while allocating ports.
   port_allocator_->set_step_delay(cricket::kMinimumStepDelay);
 
-  mediastream_signaling_.reset(new MediaStreamSignaling(
-      factory_->signaling_thread(), this, factory_->channel_manager()));
+  remote_stream_factory_.reset(new RemoteMediaStreamFactory(
+      factory_->signaling_thread(), factory_->channel_manager()));
 
-  session_.reset(new WebRtcSession(factory_->channel_manager(),
-                                   factory_->signaling_thread(),
-                                   factory_->worker_thread(),
-                                   port_allocator_.get(),
-                                   mediastream_signaling_.get()));
-  stats_.reset(new StatsCollector(session_.get()));
+  session_.reset(new WebRtcSession(
+      factory_->channel_manager(), factory_->signaling_thread(),
+      factory_->worker_thread(), port_allocator_.get()));
+  stats_.reset(new StatsCollector(this));
 
   // Initialize the WebRtcSession. It creates transport channels etc.
   if (!session_->Initialize(factory_->options(), constraints,
-                            dtls_identity_store.Pass(), configuration))
+                            dtls_identity_store.Pass(), configuration)) {
     return false;
+  }
 
   // Register PeerConnection as receiver of local ice candidates.
   // All the callbacks will be posted to the application from PeerConnection.
   session_->RegisterIceObserver(this);
   session_->SignalState.connect(this, &PeerConnection::OnSessionStateChange);
+  session_->SignalVoiceChannelDestroyed.connect(
+      this, &PeerConnection::OnVoiceChannelDestroyed);
+  session_->SignalVideoChannelDestroyed.connect(
+      this, &PeerConnection::OnVideoChannelDestroyed);
+  session_->SignalDataChannelCreated.connect(
+      this, &PeerConnection::OnDataChannelCreated);
+  session_->SignalDataChannelDestroyed.connect(
+      this, &PeerConnection::OnDataChannelDestroyed);
+  session_->SignalDataChannelOpenMessage.connect(
+      this, &PeerConnection::OnDataChannelOpenMessage);
   return true;
 }
 
 rtc::scoped_refptr<StreamCollectionInterface>
 PeerConnection::local_streams() {
-  return mediastream_signaling_->local_streams();
+  return local_streams_;
 }
 
 rtc::scoped_refptr<StreamCollectionInterface>
 PeerConnection::remote_streams() {
-  return mediastream_signaling_->remote_streams();
+  return remote_streams_;
 }
 
 // TODO(deadbeef): Create RtpSenders immediately here, even if local
@@ -457,20 +677,57 @@
   if (IsClosed()) {
     return false;
   }
-  if (!CanAddLocalMediaStream(mediastream_signaling_->local_streams(),
-                              local_stream))
-    return false;
-
-  if (!mediastream_signaling_->AddLocalStream(local_stream)) {
+  if (!CanAddLocalMediaStream(local_streams_, local_stream)) {
     return false;
   }
+
+  local_streams_->AddStream(local_stream);
+
+  // Find tracks that have already been configured in SDP. This can occur if a
+  // local session description that contains the MSID of these tracks is set
+  // before AddLocalStream is called. It can also occur if the local session
+  // description is not changed and RemoveLocalStream is called and later
+  // AddLocalStream is called again with the same stream.
+  for (const auto& track : local_stream->GetAudioTracks()) {
+    const TrackInfo* track_info =
+        FindTrackInfo(local_audio_tracks_, local_stream->label(), track->id());
+    if (track_info) {
+      CreateAudioSender(local_stream, track.get(), track_info->ssrc);
+    }
+  }
+  for (const auto& track : local_stream->GetVideoTracks()) {
+    const TrackInfo* track_info =
+        FindTrackInfo(local_video_tracks_, local_stream->label(), track->id());
+    if (track_info) {
+      CreateVideoSender(local_stream, track.get(), track_info->ssrc);
+    }
+  }
+
   stats_->AddStream(local_stream);
   observer_->OnRenegotiationNeeded();
   return true;
 }
 
+// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around
+// indefinitely.
 void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
-  mediastream_signaling_->RemoveLocalStream(local_stream);
+  for (const auto& track : local_stream->GetAudioTracks()) {
+    const TrackInfo* track_info =
+        FindTrackInfo(local_audio_tracks_, local_stream->label(), track->id());
+    if (track_info) {
+      DestroyAudioSender(local_stream, track.get(), track_info->ssrc);
+    }
+  }
+  for (const auto& track : local_stream->GetVideoTracks()) {
+    const TrackInfo* track_info =
+        FindTrackInfo(local_video_tracks_, local_stream->label(), track->id());
+    if (track_info) {
+      DestroyVideoSender(local_stream, track.get());
+    }
+  }
+
+  local_streams_->RemoveStream(local_stream);
+
   if (IsClosed()) {
     return;
   }
@@ -483,7 +740,7 @@
     LOG(LS_ERROR) << "CreateDtmfSender - track is NULL.";
     return NULL;
   }
-  if (!mediastream_signaling_->local_streams()->FindAudioTrack(track->id())) {
+  if (!local_streams_->FindAudioTrack(track->id())) {
     LOG(LS_ERROR) << "CreateDtmfSender is called with a non local audio track.";
     return NULL;
   }
@@ -553,16 +810,17 @@
 PeerConnection::CreateDataChannel(
     const std::string& label,
     const DataChannelInit* config) {
-  bool first_datachannel = !mediastream_signaling_->HasDataChannels();
+  bool first_datachannel = !HasDataChannels();
 
   rtc::scoped_ptr<InternalDataChannelInit> internal_config;
   if (config) {
     internal_config.reset(new InternalDataChannelInit(*config));
   }
   rtc::scoped_refptr<DataChannelInterface> channel(
-      session_->CreateDataChannel(label, internal_config.get()));
-  if (!channel.get())
-    return NULL;
+      InternalCreateDataChannel(label, internal_config.get()));
+  if (!channel.get()) {
+    return nullptr;
+  }
 
   // Trigger the onRenegotiationNeeded event for every new RTP DataChannel, or
   // the first SCTP DataChannel.
@@ -575,7 +833,7 @@
 
 void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
                                  const MediaConstraintsInterface* constraints) {
-  if (!VERIFY(observer != NULL)) {
+  if (!VERIFY(observer != nullptr)) {
     LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
     return;
   }
@@ -626,27 +884,45 @@
 
 void PeerConnection::CreateOffer(CreateSessionDescriptionObserver* observer,
                                  const RTCOfferAnswerOptions& options) {
-  if (!VERIFY(observer != NULL)) {
+  if (!VERIFY(observer != nullptr)) {
     LOG(LS_ERROR) << "CreateOffer - observer is NULL.";
     return;
   }
-  session_->CreateOffer(observer, options);
+
+  cricket::MediaSessionOptions session_options;
+  if (!GetOptionsForOffer(options, &session_options)) {
+    std::string error = "CreateOffer called with invalid options.";
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailure(observer, error);
+    return;
+  }
+
+  session_->CreateOffer(observer, options, session_options);
 }
 
 void PeerConnection::CreateAnswer(
     CreateSessionDescriptionObserver* observer,
     const MediaConstraintsInterface* constraints) {
-  if (!VERIFY(observer != NULL)) {
+  if (!VERIFY(observer != nullptr)) {
     LOG(LS_ERROR) << "CreateAnswer - observer is NULL.";
     return;
   }
-  session_->CreateAnswer(observer, constraints);
+
+  cricket::MediaSessionOptions session_options;
+  if (!GetOptionsForAnswer(constraints, &session_options)) {
+    std::string error = "CreateAnswer called with invalid constraints.";
+    LOG(LS_ERROR) << error;
+    PostCreateSessionDescriptionFailure(observer, error);
+    return;
+  }
+
+  session_->CreateAnswer(observer, constraints, session_options);
 }
 
 void PeerConnection::SetLocalDescription(
     SetSessionDescriptionObserver* observer,
     SessionDescriptionInterface* desc) {
-  if (!VERIFY(observer != NULL)) {
+  if (!VERIFY(observer != nullptr)) {
     LOG(LS_ERROR) << "SetLocalDescription - observer is NULL.";
     return;
   }
@@ -662,8 +938,50 @@
     PostSetSessionDescriptionFailure(observer, error);
     return;
   }
-  SetSessionDescriptionMsg* msg =  new SetSessionDescriptionMsg(observer);
+
+  // If setting the description decided our SSL role, allocate any necessary
+  // SCTP sids.
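+  // (The allocation scheme hands even sids to the SSL_CLIENT role, starting
+  // at 0, and odd sids to the SSL_SERVER role, starting at 1.)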
+  rtc::SSLRole role;
+  if (session_->data_channel_type() == cricket::DCT_SCTP &&
+      session_->GetSslRole(&role)) {
+    AllocateSctpSids(role);
+  }
+
+  // Update state and SSRC of local MediaStreams and DataChannels based on the
+  // local session description.
+  const cricket::ContentInfo* audio_content =
+      GetFirstAudioContent(desc->description());
+  if (audio_content) {
+    const cricket::AudioContentDescription* audio_desc =
+        static_cast<const cricket::AudioContentDescription*>(
+            audio_content->description);
+    UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
+  }
+
+  const cricket::ContentInfo* video_content =
+      GetFirstVideoContent(desc->description());
+  if (video_content) {
+    const cricket::VideoContentDescription* video_desc =
+        static_cast<const cricket::VideoContentDescription*>(
+            video_content->description);
+    UpdateLocalTracks(video_desc->streams(), video_desc->type());
+  }
+
+  const cricket::ContentInfo* data_content =
+      GetFirstDataContent(desc->description());
+  if (data_content) {
+    const cricket::DataContentDescription* data_desc =
+        static_cast<const cricket::DataContentDescription*>(
+            data_content->description);
+    if (rtc::starts_with(data_desc->protocol().data(),
+                         cricket::kMediaProtocolRtpPrefix)) {
+      UpdateLocalRtpDataChannels(data_desc->streams());
+    }
+  }
+
+  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
   signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);
+
   // MaybeStartGathering needs to be called after posting
   // MSG_SET_SESSIONDESCRIPTION_SUCCESS, so that we don't signal any candidates
   // before signaling that SetLocalDescription completed.
@@ -673,7 +991,7 @@
 void PeerConnection::SetRemoteDescription(
     SetSessionDescriptionObserver* observer,
     SessionDescriptionInterface* desc) {
-  if (!VERIFY(observer != NULL)) {
+  if (!VERIFY(observer != nullptr)) {
     LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL.";
     return;
   }
@@ -689,16 +1007,78 @@
     PostSetSessionDescriptionFailure(observer, error);
     return;
   }
-  SetSessionDescriptionMsg* msg  = new SetSessionDescriptionMsg(observer);
-  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);
-}
 
-void PeerConnection::PostSetSessionDescriptionFailure(
-    SetSessionDescriptionObserver* observer,
-    const std::string& error) {
-  SetSessionDescriptionMsg* msg  = new SetSessionDescriptionMsg(observer);
-  msg->error = error;
-  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_FAILED, msg);
+  // If setting the description decided our SSL role, allocate any necessary
+  // SCTP sids.
+  rtc::SSLRole role;
+  if (session_->data_channel_type() == cricket::DCT_SCTP &&
+      session_->GetSslRole(&role)) {
+    AllocateSctpSids(role);
+  }
+
+  const cricket::SessionDescription* remote_desc = desc->description();
+
+  // We wait to signal new streams until we finish processing the description,
+  // since only at that point will new streams have all their tracks.
+  rtc::scoped_refptr<StreamCollection> new_streams(StreamCollection::Create());
+
+  // Find all audio rtp streams and create corresponding remote AudioTracks
+  // and MediaStreams.
+  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
+  if (audio_content) {
+    const cricket::AudioContentDescription* desc =
+        static_cast<const cricket::AudioContentDescription*>(
+            audio_content->description);
+    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
+    remote_info_.default_audio_track_needed =
+        MediaContentDirectionHasSend(desc->direction()) &&
+        desc->streams().empty();
+  }
+
+  // Find all video rtp streams and create corresponding remote VideoTracks
+  // and MediaStreams.
+  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
+  if (video_content) {
+    const cricket::VideoContentDescription* desc =
+        static_cast<const cricket::VideoContentDescription*>(
+            video_content->description);
+    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
+    remote_info_.default_video_track_needed =
+        MediaContentDirectionHasSend(desc->direction()) &&
+        desc->streams().empty();
+  }
+
+  // Update the DataChannels with the information from the remote peer.
+  const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc);
+  if (data_content) {
+    const cricket::DataContentDescription* data_desc =
+        static_cast<const cricket::DataContentDescription*>(
+            data_content->description);
+    if (rtc::starts_with(data_desc->protocol().data(),
+                         cricket::kMediaProtocolRtpPrefix)) {
+      UpdateRemoteRtpDataChannels(data_desc->streams());
+    }
+  }
+
+  // Iterate new_streams and notify the observer about new MediaStreams.
+  for (size_t i = 0; i < new_streams->count(); ++i) {
+    MediaStreamInterface* new_stream = new_streams->at(i);
+    stats_->AddStream(new_stream);
+    observer_->OnAddStream(new_stream);
+  }
+
+  // Find removed MediaStreams.
+  if (remote_info_.IsDefaultMediaStreamNeeded() &&
+      remote_streams_->find(kDefaultStreamLabel) != nullptr) {
+    // The default media stream already exists. No need to do anything.
+  } else {
+    UpdateEndedRemoteMediaStreams();
+    remote_info_.msid_supported |= remote_streams_->count() > 0;
+  }
+  MaybeCreateDefaultStream();
+
+  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
+  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);
 }
 
 bool PeerConnection::SetConfiguration(const RTCConfiguration& config) {
@@ -832,6 +1212,13 @@
       delete param;
       break;
     }
+    case MSG_CREATE_SESSIONDESCRIPTION_FAILED: {
+      CreateSessionDescriptionMsg* param =
+          static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
+      param->observer->OnFailure(param->error);
+      delete param;
+      break;
+    }
     case MSG_GETSTATS: {
       GetStatsMsg* param = static_cast<GetStatsMsg*>(msg->pdata);
       StatsReports reports;
@@ -846,37 +1233,22 @@
   }
 }
 
-void PeerConnection::OnAddRemoteStream(MediaStreamInterface* stream) {
-  stats_->AddStream(stream);
-  observer_->OnAddStream(stream);
-}
-
-void PeerConnection::OnRemoveRemoteStream(MediaStreamInterface* stream) {
-  observer_->OnRemoveStream(stream);
-}
-
-void PeerConnection::OnAddDataChannel(DataChannelInterface* data_channel) {
-  observer_->OnDataChannel(DataChannelProxy::Create(signaling_thread(),
-                                                    data_channel));
-}
-
-void PeerConnection::OnAddRemoteAudioTrack(MediaStreamInterface* stream,
-                                           AudioTrackInterface* audio_track,
-                                           uint32_t ssrc) {
+void PeerConnection::CreateAudioReceiver(MediaStreamInterface* stream,
+                                         AudioTrackInterface* audio_track,
+                                         uint32_t ssrc) {
   receivers_.push_back(new AudioRtpReceiver(audio_track, ssrc, session_.get()));
 }
 
-void PeerConnection::OnAddRemoteVideoTrack(MediaStreamInterface* stream,
-                                           VideoTrackInterface* video_track,
-                                           uint32_t ssrc) {
+void PeerConnection::CreateVideoReceiver(MediaStreamInterface* stream,
+                                         VideoTrackInterface* video_track,
+                                         uint32_t ssrc) {
   receivers_.push_back(new VideoRtpReceiver(video_track, ssrc, session_.get()));
 }
 
 // TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote
 // description.
-void PeerConnection::OnRemoveRemoteAudioTrack(
-    MediaStreamInterface* stream,
-    AudioTrackInterface* audio_track) {
+void PeerConnection::DestroyAudioReceiver(MediaStreamInterface* stream,
+                                          AudioTrackInterface* audio_track) {
   auto it = FindReceiverForTrack(audio_track);
   if (it == receivers_.end()) {
     LOG(LS_WARNING) << "RtpReceiver for track with id " << audio_track->id()
@@ -887,9 +1259,8 @@
   }
 }
 
-void PeerConnection::OnRemoveRemoteVideoTrack(
-    MediaStreamInterface* stream,
-    VideoTrackInterface* video_track) {
+void PeerConnection::DestroyVideoReceiver(MediaStreamInterface* stream,
+                                          VideoTrackInterface* video_track) {
   auto it = FindReceiverForTrack(video_track);
   if (it == receivers_.end()) {
     LOG(LS_WARNING) << "RtpReceiver for track with id " << video_track->id()
@@ -900,24 +1271,24 @@
   }
 }
 
-void PeerConnection::OnAddLocalAudioTrack(MediaStreamInterface* stream,
-                                          AudioTrackInterface* audio_track,
-                                          uint32_t ssrc) {
+void PeerConnection::CreateAudioSender(MediaStreamInterface* stream,
+                                       AudioTrackInterface* audio_track,
+                                       uint32_t ssrc) {
   senders_.push_back(new AudioRtpSender(audio_track, ssrc, session_.get()));
   stats_->AddLocalAudioTrack(audio_track, ssrc);
 }
 
-void PeerConnection::OnAddLocalVideoTrack(MediaStreamInterface* stream,
-                                          VideoTrackInterface* video_track,
-                                          uint32_t ssrc) {
+void PeerConnection::CreateVideoSender(MediaStreamInterface* stream,
+                                       VideoTrackInterface* video_track,
+                                       uint32_t ssrc) {
   senders_.push_back(new VideoRtpSender(video_track, ssrc, session_.get()));
 }
 
 // TODO(deadbeef): Keep RtpSenders around even if track goes away in local
 // description.
-void PeerConnection::OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
-                                             AudioTrackInterface* audio_track,
-                                             uint32_t ssrc) {
+void PeerConnection::DestroyAudioSender(MediaStreamInterface* stream,
+                                        AudioTrackInterface* audio_track,
+                                        uint32_t ssrc) {
   auto it = FindSenderForTrack(audio_track);
   if (it == senders_.end()) {
     LOG(LS_WARNING) << "RtpSender for track with id " << audio_track->id()
@@ -930,8 +1301,8 @@
   stats_->RemoveLocalAudioTrack(audio_track, ssrc);
 }
 
-void PeerConnection::OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
-                                             VideoTrackInterface* video_track) {
+void PeerConnection::DestroyVideoSender(MediaStreamInterface* stream,
+                                        VideoTrackInterface* video_track) {
   auto it = FindSenderForTrack(video_track);
   if (it == senders_.end()) {
     LOG(LS_WARNING) << "RtpSender for track with id " << video_track->id()
@@ -943,15 +1314,12 @@
   }
 }
 
-void PeerConnection::OnRemoveLocalStream(MediaStreamInterface* stream) {
-}
-
 void PeerConnection::OnIceConnectionChange(
     PeerConnectionInterface::IceConnectionState new_state) {
   RTC_DCHECK(signaling_thread()->IsCurrent());
   // After transitioning to "closed", ignore any additional states from
   // WebRtcSession (such as "disconnected").
-  if (ice_connection_state_ == kIceConnectionClosed) {
+  if (IsClosed()) {
     return;
   }
   ice_connection_state_ = new_state;
@@ -998,6 +1366,540 @@
   observer_->OnStateChange(PeerConnectionObserver::kSignalingState);
 }
 
+void PeerConnection::PostSetSessionDescriptionFailure(
+    SetSessionDescriptionObserver* observer,
+    const std::string& error) {
+  SetSessionDescriptionMsg* msg = new SetSessionDescriptionMsg(observer);
+  msg->error = error;
+  signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_FAILED, msg);
+}
+
+void PeerConnection::PostCreateSessionDescriptionFailure(
+    CreateSessionDescriptionObserver* observer,
+    const std::string& error) {
+  CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer);
+  msg->error = error;
+  signaling_thread()->Post(this, MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg);
+}
+
+bool PeerConnection::GetOptionsForOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+    cricket::MediaSessionOptions* session_options) {
+  SetStreams(session_options, local_streams_, rtp_data_channels_);
+
+  if (!ConvertRtcOptionsForOffer(rtc_options, session_options)) {
+    return false;
+  }
+
+  if (session_->data_channel_type() == cricket::DCT_SCTP && HasDataChannels()) {
+    session_options->data_channel_type = cricket::DCT_SCTP;
+  }
+  return true;
+}
+
+bool PeerConnection::GetOptionsForAnswer(
+    const MediaConstraintsInterface* constraints,
+    cricket::MediaSessionOptions* session_options) {
+  SetStreams(session_options, local_streams_, rtp_data_channels_);
+  session_options->recv_audio = false;
+  session_options->recv_video = false;
+
+  if (!ParseConstraintsForAnswer(constraints, session_options)) {
+    return false;
+  }
+
+  // RTP data channels are handled in MediaSessionOptions::AddStream. SCTP
+  // streams are not signaled in the SDP, so they do not go through that path
+  // and must be handled here.
+  if (session_->data_channel_type() == cricket::DCT_SCTP) {
+    session_options->data_channel_type = cricket::DCT_SCTP;
+  }
+  return true;
+}
+
+void PeerConnection::UpdateRemoteStreamsList(
+    const cricket::StreamParamsVec& streams,
+    cricket::MediaType media_type,
+    StreamCollection* new_streams) {
+  TrackInfos* current_tracks = GetRemoteTracks(media_type);
+
+  // Find removed tracks, i.e., tracks where the track id or ssrc don't match
+  // the new StreamParam.
+  auto track_it = current_tracks->begin();
+  while (track_it != current_tracks->end()) {
+    const TrackInfo& info = *track_it;
+    const cricket::StreamParams* params =
+        cricket::GetStreamBySsrc(streams, info.ssrc);
+    if (!params || params->id != info.track_id) {
+      OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
+      track_it = current_tracks->erase(track_it);
+    } else {
+      ++track_it;
+    }
+  }
+
+  // Find new and active tracks.
+  for (const cricket::StreamParams& params : streams) {
+    // The sync_label is the MediaStream label and |params.id| is the
+    // track id.
+    const std::string& stream_label = params.sync_label;
+    const std::string& track_id = params.id;
+    uint32_t ssrc = params.first_ssrc();
+
+    rtc::scoped_refptr<MediaStreamInterface> stream =
+        remote_streams_->find(stream_label);
+    if (!stream) {
+      // This is a new MediaStream. Create a new remote MediaStream.
+      stream = remote_stream_factory_->CreateMediaStream(stream_label);
+      remote_streams_->AddStream(stream);
+      new_streams->AddStream(stream);
+    }
+
+    const TrackInfo* track_info =
+        FindTrackInfo(*current_tracks, stream_label, track_id);
+    if (!track_info) {
+      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
+      OnRemoteTrackSeen(stream_label, track_id, ssrc, media_type);
+    }
+  }
+}
+
+void PeerConnection::OnRemoteTrackSeen(const std::string& stream_label,
+                                       const std::string& track_id,
+                                       uint32_t ssrc,
+                                       cricket::MediaType media_type) {
+  MediaStreamInterface* stream = remote_streams_->find(stream_label);
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    AudioTrackInterface* audio_track =
+        remote_stream_factory_->AddAudioTrack(stream, track_id);
+    CreateAudioReceiver(stream, audio_track, ssrc);
+  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    VideoTrackInterface* video_track =
+        remote_stream_factory_->AddVideoTrack(stream, track_id);
+    CreateVideoReceiver(stream, video_track, ssrc);
+  } else {
+    RTC_DCHECK(false && "Invalid media type");
+  }
+}
+
+void PeerConnection::OnRemoteTrackRemoved(const std::string& stream_label,
+                                          const std::string& track_id,
+                                          cricket::MediaType media_type) {
+  MediaStreamInterface* stream = remote_streams_->find(stream_label);
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    rtc::scoped_refptr<AudioTrackInterface> audio_track =
+        stream->FindAudioTrack(track_id);
+    if (audio_track) {
+      audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+      stream->RemoveTrack(audio_track);
+      DestroyAudioReceiver(stream, audio_track);
+    }
+  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    rtc::scoped_refptr<VideoTrackInterface> video_track =
+        stream->FindVideoTrack(track_id);
+    if (video_track) {
+      video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+      stream->RemoveTrack(video_track);
+      DestroyVideoReceiver(stream, video_track);
+    }
+  } else {
+    ASSERT(false && "Invalid media type");
+  }
+}
+
+void PeerConnection::UpdateEndedRemoteMediaStreams() {
+  std::vector<rtc::scoped_refptr<MediaStreamInterface>> streams_to_remove;
+  for (size_t i = 0; i < remote_streams_->count(); ++i) {
+    MediaStreamInterface* stream = remote_streams_->at(i);
+    if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
+      streams_to_remove.push_back(stream);
+    }
+  }
+
+  for (const auto& stream : streams_to_remove) {
+    remote_streams_->RemoveStream(stream);
+    observer_->OnRemoveStream(stream);
+  }
+}
+
+void PeerConnection::MaybeCreateDefaultStream() {
+  if (!remote_info_.IsDefaultMediaStreamNeeded()) {
+    return;
+  }
+
+  bool default_created = false;
+
+  rtc::scoped_refptr<MediaStreamInterface> default_remote_stream =
+      remote_streams_->find(kDefaultStreamLabel);
+  if (default_remote_stream == nullptr) {
+    default_created = true;
+    default_remote_stream =
+        remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
+    remote_streams_->AddStream(default_remote_stream);
+  }
+  if (remote_info_.default_audio_track_needed &&
+      default_remote_stream->GetAudioTracks().size() == 0) {
+    remote_audio_tracks_.push_back(
+        TrackInfo(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0));
+    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
+                      cricket::MEDIA_TYPE_AUDIO);
+  }
+  if (remote_info_.default_video_track_needed &&
+      default_remote_stream->GetVideoTracks().size() == 0) {
+    remote_video_tracks_.push_back(
+        TrackInfo(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0));
+    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
+                      cricket::MEDIA_TYPE_VIDEO);
+  }
+  if (default_created) {
+    stats_->AddStream(default_remote_stream);
+    observer_->OnAddStream(default_remote_stream);
+  }
+}
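+
+// Note on the defaults above: a remote description whose audio/video sections
+// can send media but carry no stream info (no a=ssrc stream parameters)
+// results in a synthesized "default" stream with tracks "defaulta0" and
+// "defaultv0", so endpoints that do not signal MSID still surface their media
+// through OnAddStream.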
+
+void PeerConnection::EndRemoteTracks(cricket::MediaType media_type) {
+  TrackInfos* current_tracks = GetRemoteTracks(media_type);
+  for (TrackInfos::iterator track_it = current_tracks->begin();
+       track_it != current_tracks->end(); ++track_it) {
+    const TrackInfo& info = *track_it;
+    MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
+    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+      AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
+      // There's no guarantee the track is still available, e.g. the track may
+      // have been removed from the stream by javascript.
+      if (track) {
+        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+      }
+    }
+    if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+      VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
+      // There's no guarantee the track is still available, e.g. the track may
+      // have been removed from the stream by javascript.
+      if (track) {
+        track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
+      }
+    }
+  }
+}
+
+void PeerConnection::UpdateLocalTracks(
+    const std::vector<cricket::StreamParams>& streams,
+    cricket::MediaType media_type) {
+  TrackInfos* current_tracks = GetLocalTracks(media_type);
+
+  // Find removed tracks. I.e., tracks where the track id, stream label or ssrc
+  // don't match the new StreamParam.
+  TrackInfos::iterator track_it = current_tracks->begin();
+  while (track_it != current_tracks->end()) {
+    const TrackInfo& info = *track_it;
+    const cricket::StreamParams* params =
+        cricket::GetStreamBySsrc(streams, info.ssrc);
+    if (!params || params->id != info.track_id ||
+        params->sync_label != info.stream_label) {
+      OnLocalTrackRemoved(info.stream_label, info.track_id, info.ssrc,
+                          media_type);
+      track_it = current_tracks->erase(track_it);
+    } else {
+      ++track_it;
+    }
+  }
+
+  // Find new and active tracks.
+  for (const cricket::StreamParams& params : streams) {
+    // The sync_label is the MediaStream label and |params.id| is the
+    // track id.
+    const std::string& stream_label = params.sync_label;
+    const std::string& track_id = params.id;
+    uint32_t ssrc = params.first_ssrc();
+    const TrackInfo* track_info =
+        FindTrackInfo(*current_tracks, stream_label, track_id);
+    if (!track_info) {
+      current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc));
+      OnLocalTrackSeen(stream_label, track_id, params.first_ssrc(), media_type);
+    }
+  }
+}
+
+void PeerConnection::OnLocalTrackSeen(const std::string& stream_label,
+                                      const std::string& track_id,
+                                      uint32_t ssrc,
+                                      cricket::MediaType media_type) {
+  MediaStreamInterface* stream = local_streams_->find(stream_label);
+  if (!stream) {
+    LOG(LS_WARNING) << "An unknown local MediaStream with label "
+                    << stream_label << " has been configured.";
+    return;
+  }
+
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
+    if (!audio_track) {
+      LOG(LS_WARNING) << "An unknown local AudioTrack with id , " << track_id
+                      << " has been configured.";
+      return;
+    }
+    CreateAudioSender(stream, audio_track, ssrc);
+  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
+    if (!video_track) {
+      LOG(LS_WARNING) << "An unknown local VideoTrack with id , " << track_id
+                      << " has been configured.";
+      return;
+    }
+    CreateVideoSender(stream, video_track, ssrc);
+  } else {
+    RTC_DCHECK(false && "Invalid media type");
+  }
+}
+
+void PeerConnection::OnLocalTrackRemoved(const std::string& stream_label,
+                                         const std::string& track_id,
+                                         uint32_t ssrc,
+                                         cricket::MediaType media_type) {
+  MediaStreamInterface* stream = local_streams_->find(stream_label);
+  if (!stream) {
+    // This is the normal case, i.e., RemoveLocalStream has been called and
+    // the SessionDescription has been renegotiated.
+    return;
+  }
+  // A track has been removed from the SessionDescription but the MediaStream
+  // is still associated with PeerConnection. This only occurs if the SDP
+  // doesn't match with the calls to AddLocalStream and RemoveLocalStream.
+  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
+    if (!audio_track) {
+      return;
+    }
+    DestroyAudioSender(stream, audio_track, ssrc);
+  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
+    if (!video_track) {
+      return;
+    }
+    DestroyVideoSender(stream, video_track);
+  } else {
+    RTC_DCHECK(false && "Invalid media type.");
+  }
+}
+
+void PeerConnection::UpdateLocalRtpDataChannels(
+    const cricket::StreamParamsVec& streams) {
+  std::vector<std::string> existing_channels;
+
+  // Find new and active data channels.
+  for (const cricket::StreamParams& params : streams) {
+    // |params.sync_label| is actually the data channel label. The reason is
+    // that we use the same naming of data channels as we do for MediaStreams
+    // and Tracks.
+    // For MediaStreams, the sync_label is the MediaStream label and the
+    // track label is the same as |streamid|.
+    const std::string& channel_label = params.sync_label;
+    auto data_channel_it = rtp_data_channels_.find(channel_label);
+    if (!VERIFY(data_channel_it != rtp_data_channels_.end())) {
+      continue;
+    }
+    // Set the SSRC the data channel should use for sending.
+    data_channel_it->second->SetSendSsrc(params.first_ssrc());
+    existing_channels.push_back(data_channel_it->first);
+  }
+
+  UpdateClosingRtpDataChannels(existing_channels, true);
+}
+
+void PeerConnection::UpdateRemoteRtpDataChannels(
+    const cricket::StreamParamsVec& streams) {
+  std::vector<std::string> existing_channels;
+
+  // Find new and active data channels.
+  for (const cricket::StreamParams& params : streams) {
+    // The data channel label is the mslabel, or the SSRC converted to a
+    // string if no mslabel exists, e.g. a=ssrc:444330170 mslabel:test1.
+    std::string label = params.sync_label.empty()
+                            ? rtc::ToString(params.first_ssrc())
+                            : params.sync_label;
+    auto data_channel_it = rtp_data_channels_.find(label);
+    if (data_channel_it == rtp_data_channels_.end()) {
+      // This is a new data channel.
+      CreateRemoteRtpDataChannel(label, params.first_ssrc());
+    } else {
+      data_channel_it->second->SetReceiveSsrc(params.first_ssrc());
+    }
+    existing_channels.push_back(label);
+  }
+
+  UpdateClosingRtpDataChannels(existing_channels, false);
+}
+
+void PeerConnection::UpdateClosingRtpDataChannels(
+    const std::vector<std::string>& active_channels,
+    bool is_local_update) {
+  auto it = rtp_data_channels_.begin();
+  while (it != rtp_data_channels_.end()) {
+    DataChannel* data_channel = it->second;
+    if (std::find(active_channels.begin(), active_channels.end(),
+                  data_channel->label()) != active_channels.end()) {
+      ++it;
+      continue;
+    }
+
+    if (is_local_update) {
+      data_channel->SetSendSsrc(0);
+    } else {
+      data_channel->RemotePeerRequestClose();
+    }
+
+    if (data_channel->state() == DataChannel::kClosed) {
+      rtp_data_channels_.erase(it);
+      it = rtp_data_channels_.begin();
+    } else {
+      ++it;
+    }
+  }
+}
+
+void PeerConnection::CreateRemoteRtpDataChannel(const std::string& label,
+                                                uint32_t remote_ssrc) {
+  rtc::scoped_refptr<DataChannel> channel(
+      InternalCreateDataChannel(label, nullptr));
+  if (!channel.get()) {
+    LOG(LS_WARNING) << "Remote peer requested a DataChannel but"
+                    << "CreateDataChannel failed.";
+    return;
+  }
+  channel->SetReceiveSsrc(remote_ssrc);
+  observer_->OnDataChannel(
+      DataChannelProxy::Create(signaling_thread(), channel));
+}
+
+rtc::scoped_refptr<DataChannel> PeerConnection::InternalCreateDataChannel(
+    const std::string& label,
+    const InternalDataChannelInit* config) {
+  if (IsClosed()) {
+    return nullptr;
+  }
+  if (session_->data_channel_type() == cricket::DCT_NONE) {
+    LOG(LS_ERROR)
+        << "InternalCreateDataChannel: Data is not supported in this call.";
+    return nullptr;
+  }
+  InternalDataChannelInit new_config =
+      config ? (*config) : InternalDataChannelInit();
+  if (session_->data_channel_type() == cricket::DCT_SCTP) {
+    if (new_config.id < 0) {
+      rtc::SSLRole role;
+      if (session_->GetSslRole(&role) &&
+          !sid_allocator_.AllocateSid(role, &new_config.id)) {
+        LOG(LS_ERROR) << "No id can be allocated for the SCTP data channel.";
+        return nullptr;
+      }
+    } else if (!sid_allocator_.ReserveSid(new_config.id)) {
+      LOG(LS_ERROR) << "Failed to create a SCTP data channel "
+                    << "because the id is already in use or out of range.";
+      return nullptr;
+    }
+  }
+
+  rtc::scoped_refptr<DataChannel> channel(DataChannel::Create(
+      session_.get(), session_->data_channel_type(), label, new_config));
+  if (!channel) {
+    sid_allocator_.ReleaseSid(new_config.id);
+    return nullptr;
+  }
+
+  if (channel->data_channel_type() == cricket::DCT_RTP) {
+    if (rtp_data_channels_.find(channel->label()) != rtp_data_channels_.end()) {
+      LOG(LS_ERROR) << "DataChannel with label " << channel->label()
+                    << " already exists.";
+      return nullptr;
+    }
+    rtp_data_channels_[channel->label()] = channel;
+  } else {
+    RTC_DCHECK(channel->data_channel_type() == cricket::DCT_SCTP);
+    sctp_data_channels_.push_back(channel);
+    channel->SignalClosed.connect(this,
+                                  &PeerConnection::OnSctpDataChannelClosed);
+  }
+
+  return channel;
+}
+
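For context, a minimal caller-side sketch of the two paths into InternalCreateDataChannel, assuming only the public PeerConnectionInterface and DataChannelInit API (|pc| is an already-created connection; this snippet is illustrative and not part of the patch):

    // With no config, an SCTP sid is allocated from sid_allocator_ once the
    // DTLS role is known (or later, via AllocateSctpSids).
    rtc::scoped_refptr<webrtc::DataChannelInterface> chat =
        pc->CreateDataChannel("chat", nullptr);

    // With a pre-negotiated id, ReserveSid() marks the stream as used up
    // front, so neither the allocator nor an incoming OPEN message can hand
    // out the same sid again.
    webrtc::DataChannelInit init;
    init.negotiated = true;
    init.id = 4;
    rtc::scoped_refptr<webrtc::DataChannelInterface> telemetry =
        pc->CreateDataChannel("telemetry", &init);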
+bool PeerConnection::HasDataChannels() const {
+  return !rtp_data_channels_.empty() || !sctp_data_channels_.empty();
+}
+
+void PeerConnection::AllocateSctpSids(rtc::SSLRole role) {
+  for (const auto& channel : sctp_data_channels_) {
+    if (channel->id() < 0) {
+      int sid;
+      if (!sid_allocator_.AllocateSid(role, &sid)) {
+        LOG(LS_ERROR) << "Failed to allocate SCTP sid.";
+        continue;
+      }
+      channel->SetSctpSid(sid);
+    }
+  }
+}
+
+void PeerConnection::OnSctpDataChannelClosed(DataChannel* channel) {
+  for (auto it = sctp_data_channels_.begin(); it != sctp_data_channels_.end();
+       ++it) {
+    if (it->get() == channel) {
+      int sid = channel->id();
+      RTC_DCHECK(sid >= 0);
+      sid_allocator_.ReleaseSid(sid);
+      sctp_data_channels_.erase(it);
+      return;
+    }
+  }
+}
+
+void PeerConnection::OnVoiceChannelDestroyed() {
+  EndRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
+}
+
+void PeerConnection::OnVideoChannelDestroyed() {
+  EndRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
+}
+
+void PeerConnection::OnDataChannelCreated() {
+  for (const auto& channel : sctp_data_channels_) {
+    channel->OnTransportChannelCreated();
+  }
+}
+
+void PeerConnection::OnDataChannelDestroyed() {
+  // Swap the RTP/SCTP DataChannel lists into temporaries, since a
+  // DataChannel may call back into us and try to modify the lists while we
+  // iterate.
+  std::map<std::string, rtc::scoped_refptr<DataChannel>> temp_rtp_dcs;
+  temp_rtp_dcs.swap(rtp_data_channels_);
+  for (const auto& kv : temp_rtp_dcs) {
+    kv.second->OnTransportChannelDestroyed();
+  }
+
+  std::vector<rtc::scoped_refptr<DataChannel>> temp_sctp_dcs;
+  temp_sctp_dcs.swap(sctp_data_channels_);
+  for (const auto& channel : temp_sctp_dcs) {
+    channel->OnTransportChannelDestroyed();
+  }
+}
+
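Each SCTP channel's SignalClosed is wired to OnSctpDataChannelClosed above, which erases the channel from sctp_data_channels_, so iterating the member directly here could invalidate the loop mid-flight. A generic sketch of the swap-before-notify pattern, using a hypothetical Widget type purely for illustration:

    // Move the container aside first; callbacks that mutate the member (for
    // example by erasing a closed entry) then cannot invalidate this loop.
    std::vector<rtc::scoped_refptr<Widget>> temp;
    temp.swap(widgets_);  // |widgets_| is now empty.
    for (const auto& w : temp) {
      w->OnTransportChannelDestroyed();  // May signal back into this object.
    }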
+void PeerConnection::OnDataChannelOpenMessage(
+    const std::string& label,
+    const InternalDataChannelInit& config) {
+  rtc::scoped_refptr<DataChannel> channel(
+      InternalCreateDataChannel(label, &config));
+  if (!channel.get()) {
+    LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message.";
+    return;
+  }
+
+  observer_->OnDataChannel(
+      DataChannelProxy::Create(signaling_thread(), channel));
+}
+
 std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
 PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) {
   return std::find_if(
@@ -1016,4 +1918,42 @@
       });
 }
 
+PeerConnection::TrackInfos* PeerConnection::GetRemoteTracks(
+    cricket::MediaType media_type) {
+  RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+             media_type == cricket::MEDIA_TYPE_VIDEO);
+  return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &remote_audio_tracks_
+                                                   : &remote_video_tracks_;
+}
+
+PeerConnection::TrackInfos* PeerConnection::GetLocalTracks(
+    cricket::MediaType media_type) {
+  RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO ||
+             media_type == cricket::MEDIA_TYPE_VIDEO);
+  return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &local_audio_tracks_
+                                                   : &local_video_tracks_;
+}
+
+const PeerConnection::TrackInfo* PeerConnection::FindTrackInfo(
+    const PeerConnection::TrackInfos& infos,
+    const std::string& stream_label,
+    const std::string& track_id) const {
+  for (const TrackInfo& track_info : infos) {
+    if (track_info.stream_label == stream_label &&
+        track_info.track_id == track_id) {
+      return &track_info;
+    }
+  }
+  return nullptr;
+}
+
+DataChannel* PeerConnection::FindDataChannelBySid(int sid) const {
+  for (const auto& channel : sctp_data_channels_) {
+    if (channel->id() == sid) {
+      return channel;
+    }
+  }
+  return nullptr;
+}
+
 }  // namespace webrtc
diff --git a/talk/app/webrtc/peerconnection.h b/talk/app/webrtc/peerconnection.h
index 3d6ce1b..0c04898 100644
--- a/talk/app/webrtc/peerconnection.h
+++ b/talk/app/webrtc/peerconnection.h
@@ -31,7 +31,6 @@
 #include <string>
 
 #include "talk/app/webrtc/dtlsidentitystore.h"
-#include "talk/app/webrtc/mediastreamsignaling.h"
 #include "talk/app/webrtc/peerconnectionfactory.h"
 #include "talk/app/webrtc/peerconnectioninterface.h"
 #include "talk/app/webrtc/rtpreceiverinterface.h"
@@ -43,11 +42,26 @@
 
 namespace webrtc {
 
+class RemoteMediaStreamFactory;
+
 typedef std::vector<PortAllocatorFactoryInterface::StunConfiguration>
     StunConfigurations;
 typedef std::vector<PortAllocatorFactoryInterface::TurnConfiguration>
     TurnConfigurations;
 
+// Populates |session_options| from |rtc_options|, and returns true if options
+// are valid.
+// Send streams should already be added to |session_options| before this method
+// is called, as this affects the values of recv_audio and recv_video.
+bool ConvertRtcOptionsForOffer(
+    const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+    cricket::MediaSessionOptions* session_options);
+
+// Populates |session_options| from |constraints|, and returns true if all
+// mandatory constraints are satisfied.
+bool ParseConstraintsForAnswer(const MediaConstraintsInterface* constraints,
+                               cricket::MediaSessionOptions* session_options);
+
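These are exposed as free functions, presumably so the option translation can be unit-tested in isolation. A minimal usage sketch under that assumption, using nothing beyond the declarations above and the public RTCOfferAnswerOptions struct:

    // Translate JSEP-style offer options into cricket::MediaSessionOptions.
    // Per the comment above, send streams should already have been added to
    // |session_options| before this call.
    PeerConnectionInterface::RTCOfferAnswerOptions rtc_options;
    rtc_options.offer_to_receive_audio = 1;  // Ask to receive one audio track.
    rtc_options.offer_to_receive_video = 0;  // Don't ask to receive video.

    cricket::MediaSessionOptions session_options;
    if (!ConvertRtcOptionsForOffer(rtc_options, &session_options)) {
      // Options were invalid; don't create the offer.
    }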
 // Parses the URLs for each server in |servers| to build |stun_config| and
 // |turn_config|.
 bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
@@ -55,10 +69,8 @@
                      TurnConfigurations* turn_config);
 
 // PeerConnection implements the PeerConnectionInterface interface.
-// It uses MediaStreamSignaling and WebRtcSession to implement
-// the PeerConnection functionality.
+// It uses WebRtcSession to implement the PeerConnection functionality.
 class PeerConnection : public PeerConnectionInterface,
-                       public MediaStreamSignalingObserver,
                        public IceObserver,
                        public rtc::MessageHandler,
                        public sigslot::has_slots<> {
@@ -76,6 +88,8 @@
   bool AddStream(MediaStreamInterface* local_stream) override;
   void RemoveStream(MediaStreamInterface* local_stream) override;
 
+  virtual WebRtcSession* session() { return session_.get(); }
+
   rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
       AudioTrackInterface* track) override;
 
@@ -120,39 +134,72 @@
 
   void Close() override;
 
+  // Virtual for unit tests.
+  virtual const std::vector<rtc::scoped_refptr<DataChannel>>&
+  sctp_data_channels() const {
+    return sctp_data_channels_;
+  }
+
  protected:
   ~PeerConnection() override;
 
  private:
+  struct TrackInfo {
+    TrackInfo() : ssrc(0) {}
+    TrackInfo(const std::string& stream_label,
+              const std::string& track_id,
+              uint32_t ssrc)
+        : stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
+    std::string stream_label;
+    std::string track_id;
+    uint32_t ssrc;
+  };
+  typedef std::vector<TrackInfo> TrackInfos;
+
+  struct RemotePeerInfo {
+    RemotePeerInfo()
+        : msid_supported(false),
+          default_audio_track_needed(false),
+          default_video_track_needed(false) {}
+    // True if it has been discovered that the remote peer supports MSID.
+    bool msid_supported;
+    // The remote peer indicates in the session description that audio will be
+    // sent but no MSID is given.
+    bool default_audio_track_needed;
+    // The remote peer indicates in the session description that video will be
+    // sent but no MSID is given.
+    bool default_video_track_needed;
+
+    bool IsDefaultMediaStreamNeeded() {
+      return !msid_supported &&
+             (default_audio_track_needed || default_video_track_needed);
+    }
+  };
+
   // Implements MessageHandler.
   void OnMessage(rtc::Message* msg) override;
 
-  // Implements MediaStreamSignalingObserver.
-  void OnAddRemoteStream(MediaStreamInterface* stream) override;
-  void OnRemoveRemoteStream(MediaStreamInterface* stream) override;
-  void OnAddDataChannel(DataChannelInterface* data_channel) override;
-  void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
-                             AudioTrackInterface* audio_track,
-                             uint32_t ssrc) override;
-  void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
-                             VideoTrackInterface* video_track,
-                             uint32_t ssrc) override;
-  void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
-                                AudioTrackInterface* audio_track) override;
-  void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream,
-                                VideoTrackInterface* video_track) override;
-  void OnAddLocalAudioTrack(MediaStreamInterface* stream,
-                            AudioTrackInterface* audio_track,
-                            uint32_t ssrc) override;
-  void OnAddLocalVideoTrack(MediaStreamInterface* stream,
-                            VideoTrackInterface* video_track,
-                            uint32_t ssrc) override;
-  void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
-                               AudioTrackInterface* audio_track,
-                               uint32_t ssrc) override;
-  void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
-                               VideoTrackInterface* video_track) override;
-  void OnRemoveLocalStream(MediaStreamInterface* stream) override;
+  void CreateAudioReceiver(MediaStreamInterface* stream,
+                           AudioTrackInterface* audio_track,
+                           uint32_t ssrc);
+  void CreateVideoReceiver(MediaStreamInterface* stream,
+                           VideoTrackInterface* video_track,
+                           uint32_t ssrc);
+  void DestroyAudioReceiver(MediaStreamInterface* stream,
+                            AudioTrackInterface* audio_track);
+  void DestroyVideoReceiver(MediaStreamInterface* stream,
+                            VideoTrackInterface* video_track);
+  void CreateAudioSender(MediaStreamInterface* stream,
+                         AudioTrackInterface* audio_track,
+                         uint32_t ssrc);
+  void CreateVideoSender(MediaStreamInterface* stream,
+                         VideoTrackInterface* video_track,
+                         uint32_t ssrc);
+  void DestroyAudioSender(MediaStreamInterface* stream,
+                          AudioTrackInterface* audio_track,
+                          uint32_t ssrc);
+  void DestroyVideoSender(MediaStreamInterface* stream,
+                          VideoTrackInterface* video_track);
 
   // Implements IceObserver
   void OnIceConnectionChange(IceConnectionState new_state) override;
@@ -172,21 +219,138 @@
 
   void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer,
                                         const std::string& error);
+  void PostCreateSessionDescriptionFailure(
+      CreateSessionDescriptionObserver* observer,
+      const std::string& error);
 
   bool IsClosed() const {
     return signaling_state_ == PeerConnectionInterface::kClosed;
   }
 
+  // Returns a MediaSessionOptions struct with options decided by
+  // |rtc_options|, the local MediaStreams and DataChannels.
+  virtual bool GetOptionsForOffer(
+      const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+      cricket::MediaSessionOptions* session_options);
+
+  // Returns a MediaSessionOptions struct with options decided by
+  // |constraints|, the local MediaStreams and DataChannels.
+  virtual bool GetOptionsForAnswer(
+      const MediaConstraintsInterface* constraints,
+      cricket::MediaSessionOptions* session_options);
+
+  // Makes sure a MediaStreamTrack is created for each StreamParams entry in
+  // |streams|. |media_type| is the type of |streams| and can be either audio
+  // or video.
+  // If a new MediaStream is created it is added to |new_streams|.
+  void UpdateRemoteStreamsList(
+      const std::vector<cricket::StreamParams>& streams,
+      cricket::MediaType media_type,
+      StreamCollection* new_streams);
+
+  // Triggered when a remote track has been seen for the first time in a remote
+  // session description. It creates a remote MediaStreamTrackInterface
+  // implementation and triggers CreateAudioReceiver or CreateVideoReceiver.
+  void OnRemoteTrackSeen(const std::string& stream_label,
+                         const std::string& track_id,
+                         uint32_t ssrc,
+                         cricket::MediaType media_type);
+
+  // Triggered when a remote track has been removed from a remote session
+  // description. It removes the remote track with id |track_id| from a remote
+  // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver.
+  void OnRemoteTrackRemoved(const std::string& stream_label,
+                            const std::string& track_id,
+                            cricket::MediaType media_type);
+
+  // Finds remote MediaStreams without any tracks and removes them from
+  // |remote_streams_| and notifies the observer that the MediaStreams no longer
+  // exist.
+  void UpdateEndedRemoteMediaStreams();
+
+  void MaybeCreateDefaultStream();
+
+  // Set the MediaStreamTrackInterface::TrackState to |kEnded| on all remote
+  // tracks of type |media_type|.
+  void EndRemoteTracks(cricket::MediaType media_type);
+
+  // Loops through the vector of |streams| and finds added and removed
+  // StreamParams since last time this method was called.
+  // For each new or removed StreamParam, OnLocalTrackSeen or
+  // OnLocalTrackRemoved is invoked.
+  void UpdateLocalTracks(const std::vector<cricket::StreamParams>& streams,
+                         cricket::MediaType media_type);
+
+  // Triggered when a local track has been seen for the first time in a local
+  // session description.
+  // This method triggers CreateAudioSender or CreateVideoSender if the RTP
+  // streams in the local SessionDescription can be mapped to a
+  // MediaStreamTrack in a MediaStream in |local_streams_|.
+  void OnLocalTrackSeen(const std::string& stream_label,
+                        const std::string& track_id,
+                        uint32_t ssrc,
+                        cricket::MediaType media_type);
+
+  // Triggered when a local track has been removed from a local session
+  // description.
+  // This method triggers DestroyAudioSender or DestroyVideoSender if a stream
+  // has been removed from the local SessionDescription and the stream can be
+  // mapped to a MediaStreamTrack in a MediaStream in |local_streams_|.
+  void OnLocalTrackRemoved(const std::string& stream_label,
+                           const std::string& track_id,
+                           uint32_t ssrc,
+                           cricket::MediaType media_type);
+
+  void UpdateLocalRtpDataChannels(const cricket::StreamParamsVec& streams);
+  void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
+  void UpdateClosingRtpDataChannels(
+      const std::vector<std::string>& active_channels,
+      bool is_local_update);
+  void CreateRemoteRtpDataChannel(const std::string& label,
+                                  uint32_t remote_ssrc);
+
+  // Creates channel and adds it to the collection of DataChannels that will
+  // be offered in a SessionDescription.
+  rtc::scoped_refptr<DataChannel> InternalCreateDataChannel(
+      const std::string& label,
+      const InternalDataChannelInit* config);
+
+  // Checks if any data channel has been added.
+  bool HasDataChannels() const;
+
+  void AllocateSctpSids(rtc::SSLRole role);
+  void OnSctpDataChannelClosed(DataChannel* channel);
+
+  // Notifications from WebRtcSession relating to BaseChannels.
+  void OnVoiceChannelDestroyed();
+  void OnVideoChannelDestroyed();
+  void OnDataChannelCreated();
+  void OnDataChannelDestroyed();
+  // Called when the cricket::DataChannel receives a message indicating that a
+  // webrtc::DataChannel should be opened.
+  void OnDataChannelOpenMessage(const std::string& label,
+                                const InternalDataChannelInit& config);
+
   std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
   FindSenderForTrack(MediaStreamTrackInterface* track);
   std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator
   FindReceiverForTrack(MediaStreamTrackInterface* track);
 
+  TrackInfos* GetRemoteTracks(cricket::MediaType media_type);
+  TrackInfos* GetLocalTracks(cricket::MediaType media_type);
+  const TrackInfo* FindTrackInfo(const TrackInfos& infos,
+                                 const std::string& stream_label,
+                                 const std::string& track_id) const;
+
+  // Returns the specified SCTP DataChannel in sctp_data_channels_,
+  // or nullptr if not found.
+  DataChannel* FindDataChannelBySid(int sid) const;
+
   // Storing the factory as a scoped reference pointer ensures that the memory
   // in the PeerConnectionFactoryImpl remains available as long as the
   // PeerConnection is running. It is passed to PeerConnection as a raw pointer.
   // However, since the reference counting is done in the
-  // PeerConnectionFactoryInteface all instances created using the raw pointer
+  // PeerConnectionFactoryInterface all instances created using the raw pointer
   // will refer to the same reference count.
   rtc::scoped_refptr<PeerConnectionFactory> factory_;
   PeerConnectionObserver* observer_;
@@ -198,12 +362,35 @@
   IceGatheringState ice_gathering_state_;
 
   rtc::scoped_ptr<cricket::PortAllocator> port_allocator_;
-  rtc::scoped_ptr<WebRtcSession> session_;
-  rtc::scoped_ptr<MediaStreamSignaling> mediastream_signaling_;
-  rtc::scoped_ptr<StatsCollector> stats_;
+
+  // Streams added via AddStream.
+  rtc::scoped_refptr<StreamCollection> local_streams_;
+  // Streams created as a result of SetRemoteDescription.
+  rtc::scoped_refptr<StreamCollection> remote_streams_;
+
+  // These lists store track info seen in local/remote descriptions.
+  TrackInfos remote_audio_tracks_;
+  TrackInfos remote_video_tracks_;
+  TrackInfos local_audio_tracks_;
+  TrackInfos local_video_tracks_;
+
+  SctpSidAllocator sid_allocator_;
+  // label -> DataChannel
+  std::map<std::string, rtc::scoped_refptr<DataChannel>> rtp_data_channels_;
+  std::vector<rtc::scoped_refptr<DataChannel>> sctp_data_channels_;
+
+  RemotePeerInfo remote_info_;
+  rtc::scoped_ptr<RemoteMediaStreamFactory> remote_stream_factory_;
 
   std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders_;
   std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers_;
+
+  // The session_ scoped_ptr is declared at the bottom of PeerConnection
+  // because its destruction fires signals (such as VoiceChannelDestroyed)
+  // which will trigger some final actions in PeerConnection...
+  rtc::scoped_ptr<WebRtcSession> session_;
+  // ... But stats_ depends on session_ so it should be destroyed even earlier.
+  rtc::scoped_ptr<StatsCollector> stats_;
 };
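The ordering comment above leans on the C++ rule that non-static data members are destroyed in reverse declaration order; a tiny self-contained illustration (types chosen arbitrarily, unrelated to this patch):

    #include <string>
    #include <vector>

    struct Example {
      std::string first_;        // Constructed first, destroyed last.
      std::vector<int> middle_;  // Constructed second, destroyed second.
      std::string last_;         // Constructed last, destroyed first.
    };

So with stats_ declared after session_, ~PeerConnection() tears down stats_ first, then session_ (whose destructor fires the channel-destroyed signals), while the stream, track-info and data channel members declared above them are still alive to handle those signals.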
 
 }  // namespace webrtc
diff --git a/talk/app/webrtc/peerconnectionendtoend_unittest.cc b/talk/app/webrtc/peerconnectionendtoend_unittest.cc
index ceabf04..eacedd4 100644
--- a/talk/app/webrtc/peerconnectionendtoend_unittest.cc
+++ b/talk/app/webrtc/peerconnectionendtoend_unittest.cc
@@ -364,3 +364,35 @@
   EXPECT_EQ(1U, dc_1_observer->received_message_count());
   EXPECT_EQ(1U, dc_2_observer->received_message_count());
 }
+
+// Verifies that a DataChannel added from an OPEN message functions after
+// a channel has been previously closed (webrtc issue 3778).
+// This previously failed because the new channel re-uses the ID of the closed
+// channel, which was still incorrectly associated with that ID.
+// TODO(deadbeef): This is disabled because there's currently a race condition
+// caused by the fact that a data channel signals that it's closed before it
+// really is. Re-enable this test once that's fixed.
+TEST_F(PeerConnectionEndToEndTest,
+       DISABLED_DataChannelFromOpenWorksAfterClose) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  CreatePcs();
+
+  webrtc::DataChannelInit init;
+  rtc::scoped_refptr<DataChannelInterface> caller_dc(
+      caller_->CreateDataChannel("data", init));
+
+  Negotiate();
+  WaitForConnection();
+
+  WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 0);
+  CloseDataChannels(caller_dc, callee_signaled_data_channels_, 0);
+
+  // Create a new channel and ensure it works after closing the previous one.
+  caller_dc = caller_->CreateDataChannel("data2", init);
+
+  WaitForDataChannelsToOpen(caller_dc, callee_signaled_data_channels_, 1);
+  TestDataChannelSendAndReceive(caller_dc, callee_signaled_data_channels_[1]);
+
+  CloseDataChannels(caller_dc, callee_signaled_data_channels_, 1);
+}
diff --git a/talk/app/webrtc/peerconnectionfactory.cc b/talk/app/webrtc/peerconnectionfactory.cc
index c329446..0887754 100644
--- a/talk/app/webrtc/peerconnectionfactory.cc
+++ b/talk/app/webrtc/peerconnectionfactory.cc
@@ -29,6 +29,7 @@
 
 #include "talk/app/webrtc/audiotrack.h"
 #include "talk/app/webrtc/localaudiosource.h"
+#include "talk/app/webrtc/mediastream.h"
 #include "talk/app/webrtc/mediastreamproxy.h"
 #include "talk/app/webrtc/mediastreamtrackproxy.h"
 #include "talk/app/webrtc/peerconnection.h"
diff --git a/talk/app/webrtc/peerconnectioninterface_unittest.cc b/talk/app/webrtc/peerconnectioninterface_unittest.cc
index 8b7c9cf..5e88658 100644
--- a/talk/app/webrtc/peerconnectioninterface_unittest.cc
+++ b/talk/app/webrtc/peerconnectioninterface_unittest.cc
@@ -27,15 +27,22 @@
 
 #include <string>
 
+#include "talk/app/webrtc/audiotrack.h"
 #include "talk/app/webrtc/fakeportallocatorfactory.h"
 #include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/app/webrtc/mediastream.h"
 #include "talk/app/webrtc/mediastreaminterface.h"
+#include "talk/app/webrtc/peerconnection.h"
 #include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/rtpreceiverinterface.h"
+#include "talk/app/webrtc/rtpsenderinterface.h"
+#include "talk/app/webrtc/streamcollection.h"
 #include "talk/app/webrtc/test/fakeconstraints.h"
 #include "talk/app/webrtc/test/fakedtlsidentitystore.h"
 #include "talk/app/webrtc/test/mockpeerconnectionobservers.h"
 #include "talk/app/webrtc/test/testsdpstrings.h"
 #include "talk/app/webrtc/videosource.h"
+#include "talk/app/webrtc/videotrack.h"
 #include "talk/media/base/fakevideocapturer.h"
 #include "talk/media/sctp/sctpdataengine.h"
 #include "talk/session/media/mediasession.h"
@@ -60,6 +67,167 @@
 static const char kTurnHostname[] = "turn.example.org";
 static const uint32_t kTimeout = 10000U;
 
+static const char kStreams[][8] = {"stream1", "stream2"};
+static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
+static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};
+
+// Reference SDP with a MediaStream with label "stream1" and audio track with
+// id "audio_1" and a video track with id "video_1;
+static const char kSdpStringWithStream1[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "a=ssrc:1 cname:stream1\r\n"
+    "a=ssrc:1 mslabel:stream1\r\n"
+    "a=ssrc:1 label:audiotrack0\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=rtpmap:120 VP8/90000\r\n"
+    "a=ssrc:2 cname:stream1\r\n"
+    "a=ssrc:2 mslabel:stream1\r\n"
+    "a=ssrc:2 label:videotrack0\r\n";
+
+// Reference SDP with two MediaStreams with label "stream1" and "stream2. Each
+// MediaStreams have one audio track and one video track.
+// This uses MSID.
+static const char kSdpStringWithStream1And2[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "a=msid-semantic: WMS stream1 stream2\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "a=ssrc:1 cname:stream1\r\n"
+    "a=ssrc:1 msid:stream1 audiotrack0\r\n"
+    "a=ssrc:3 cname:stream2\r\n"
+    "a=ssrc:3 msid:stream2 audiotrack1\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=rtpmap:120 VP8/0\r\n"
+    "a=ssrc:2 cname:stream1\r\n"
+    "a=ssrc:2 msid:stream1 videotrack0\r\n"
+    "a=ssrc:4 cname:stream2\r\n"
+    "a=ssrc:4 msid:stream2 videotrack1\r\n";
+
+// Reference SDP without MediaStreams. Msid is not supported.
+static const char kSdpStringWithoutStreams[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=rtpmap:120 VP8/90000\r\n";
+
+// Reference SDP without MediaStreams. Msid is supported.
+static const char kSdpStringWithMsidWithoutStreams[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "a=msid-semantic: WMS\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=rtpmap:120 VP8/90000\r\n";
+
+// Reference SDP without MediaStreams and audio only.
+static const char kSdpStringWithoutStreamsAudioOnly[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n";
+
+// Reference SENDONLY SDP without MediaStreams. Msid is not supported.
+static const char kSdpStringSendOnlyWithoutStreams[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=sendonly\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=sendonly\r\n"
+    "a=rtpmap:120 VP8/90000\r\n";
+
+static const char kSdpStringInit[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=-\r\n"
+    "t=0 0\r\n"
+    "a=ice-ufrag:e5785931\r\n"
+    "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n"
+    "a=fingerprint:sha-256 58:AB:6E:F5:F1:E4:57:B7:E9:46:F4:86:04:28:F9:A7:ED:"
+    "BD:AB:AE:40:EF:CE:9A:51:2C:2A:B1:9B:8B:78:84\r\n"
+    "a=msid-semantic: WMS\r\n";
+
+static const char kSdpStringAudio[] =
+    "m=audio 1 RTP/AVPF 103\r\n"
+    "a=mid:audio\r\n"
+    "a=rtpmap:103 ISAC/16000\r\n";
+
+static const char kSdpStringVideo[] =
+    "m=video 1 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=rtpmap:120 VP8/90000\r\n";
+
+static const char kSdpStringMs1Audio0[] =
+    "a=ssrc:1 cname:stream1\r\n"
+    "a=ssrc:1 msid:stream1 audiotrack0\r\n";
+
+static const char kSdpStringMs1Video0[] =
+    "a=ssrc:2 cname:stream1\r\n"
+    "a=ssrc:2 msid:stream1 videotrack0\r\n";
+
+static const char kSdpStringMs1Audio1[] =
+    "a=ssrc:3 cname:stream1\r\n"
+    "a=ssrc:3 msid:stream1 audiotrack1\r\n";
+
+static const char kSdpStringMs1Video1[] =
+    "a=ssrc:4 cname:stream1\r\n"
+    "a=ssrc:4 msid:stream1 videotrack1\r\n";
+
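For orientation, the fragments above are building blocks that the tests concatenate into complete offers; for example (this is essentially what CreateSessionDescriptionAndReference() further down does):

    // Illustrative only: an offer describing stream1 with a single audio
    // track is assembled from the init fragment, the audio m-line and the
    // stream1/audiotrack0 ssrc lines.
    std::string sdp =
        std::string(kSdpStringInit) + kSdpStringAudio + kSdpStringMs1Audio0;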
 #define MAYBE_SKIP_TEST(feature)                    \
   if (!(feature())) {                               \
     LOG(LS_INFO) << "Feature disabled... skipping"; \
@@ -69,12 +237,14 @@
 using rtc::scoped_ptr;
 using rtc::scoped_refptr;
 using webrtc::AudioSourceInterface;
+using webrtc::AudioTrack;
 using webrtc::AudioTrackInterface;
 using webrtc::DataBuffer;
 using webrtc::DataChannelInterface;
 using webrtc::FakeConstraints;
 using webrtc::FakePortAllocatorFactory;
 using webrtc::IceCandidateInterface;
+using webrtc::MediaStream;
 using webrtc::MediaStreamInterface;
 using webrtc::MediaStreamTrackInterface;
 using webrtc::MockCreateSessionDescriptionObserver;
@@ -84,11 +254,18 @@
 using webrtc::PeerConnectionInterface;
 using webrtc::PeerConnectionObserver;
 using webrtc::PortAllocatorFactoryInterface;
+using webrtc::RtpReceiverInterface;
+using webrtc::RtpSenderInterface;
 using webrtc::SdpParseError;
 using webrtc::SessionDescriptionInterface;
+using webrtc::StreamCollection;
+using webrtc::StreamCollectionInterface;
 using webrtc::VideoSourceInterface;
+using webrtc::VideoTrack;
 using webrtc::VideoTrackInterface;
 
+typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions;
+
 namespace {
 
 // Gets the first ssrc of given content type from the ContentInfo.
@@ -118,12 +295,97 @@
   }
 }
 
+// Check if |streams| contains the specified track.
+bool ContainsTrack(const std::vector<cricket::StreamParams>& streams,
+                   const std::string& stream_label,
+                   const std::string& track_id) {
+  for (const cricket::StreamParams& params : streams) {
+    if (params.sync_label == stream_label && params.id == track_id) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Check if |senders| contains the specified sender, by id.
+bool ContainsSender(
+    const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
+    const std::string& id) {
+  for (const auto& sender : senders) {
+    if (sender->id() == id) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Create a collection of streams.
+// CreateStreamCollection(1) creates a collection that corresponds to
+// kSdpStringWithStream1.
+// CreateStreamCollection(2) corresponds to kSdpStringWithStream1And2.
+rtc::scoped_refptr<StreamCollection> CreateStreamCollection(
+    int number_of_streams) {
+  rtc::scoped_refptr<StreamCollection> local_collection(
+      StreamCollection::Create());
+
+  for (int i = 0; i < number_of_streams; ++i) {
+    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+        webrtc::MediaStream::Create(kStreams[i]));
+
+    // Add a local audio track.
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+        webrtc::AudioTrack::Create(kAudioTracks[i], nullptr));
+    stream->AddTrack(audio_track);
+
+    // Add a local video track.
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+        webrtc::VideoTrack::Create(kVideoTracks[i], nullptr));
+    stream->AddTrack(video_track);
+
+    local_collection->AddStream(stream);
+  }
+  return local_collection;
+}
+
+// Check equality of StreamCollections.
+bool CompareStreamCollections(StreamCollectionInterface* s1,
+                              StreamCollectionInterface* s2) {
+  if (s1 == nullptr || s2 == nullptr || s1->count() != s2->count()) {
+    return false;
+  }
+
+  for (size_t i = 0; i != s1->count(); ++i) {
+    if (s1->at(i)->label() != s2->at(i)->label()) {
+      return false;
+    }
+    webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks();
+    webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks();
+    webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks();
+    webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks();
+
+    if (audio_tracks1.size() != audio_tracks2.size()) {
+      return false;
+    }
+    for (size_t j = 0; j != audio_tracks1.size(); ++j) {
+      if (audio_tracks1[j]->id() != audio_tracks2[j]->id()) {
+        return false;
+      }
+    }
+    if (video_tracks1.size() != video_tracks2.size()) {
+      return false;
+    }
+    for (size_t j = 0; j != video_tracks1.size(); ++j) {
+      if (video_tracks1[j]->id() != video_tracks2[j]->id()) {
+        return false;
+      }
+    }
+  }
+  return true;
+}
+
 class MockPeerConnectionObserver : public PeerConnectionObserver {
  public:
-  MockPeerConnectionObserver()
-      : renegotiation_needed_(false),
-        ice_complete_(false) {
-  }
+  MockPeerConnectionObserver() : remote_streams_(StreamCollection::Create()) {}
   ~MockPeerConnectionObserver() {
   }
   void SetPeerConnectionInterface(PeerConnectionInterface* pc) {
@@ -157,11 +419,18 @@
         break;
     }
   }
+
+  MediaStreamInterface* RemoteStream(const std::string& label) {
+    return remote_streams_->find(label);
+  }
+  StreamCollectionInterface* remote_streams() const { return remote_streams_; }
   virtual void OnAddStream(MediaStreamInterface* stream) {
     last_added_stream_ = stream;
+    remote_streams_->AddStream(stream);
   }
   virtual void OnRemoveStream(MediaStreamInterface* stream) {
     last_removed_stream_ = stream;
+    remote_streams_->RemoveStream(stream);
   }
   virtual void OnRenegotiationNeeded() {
     renegotiation_needed_ = true;
@@ -216,8 +485,9 @@
   PeerConnectionInterface::SignalingState state_;
   scoped_ptr<IceCandidateInterface> last_candidate_;
   scoped_refptr<DataChannelInterface> last_datachannel_;
-  bool renegotiation_needed_;
-  bool ice_complete_;
+  rtc::scoped_refptr<StreamCollection> remote_streams_;
+  bool renegotiation_needed_ = false;
+  bool ice_complete_ = false;
 
  private:
   scoped_refptr<MediaStreamInterface> last_added_stream_;
@@ -225,6 +495,7 @@
 };
 
 }  // namespace
+
 class PeerConnectionInterfaceTest : public testing::Test {
  protected:
   virtual void SetUp() {
@@ -327,7 +598,7 @@
     observer_.SetPeerConnectionInterface(NULL);
   }
 
-  void AddStream(const std::string& label) {
+  void AddVideoStream(const std::string& label) {
     // Create a local stream.
     scoped_refptr<MediaStreamInterface> stream(
         pc_factory_->CreateLocalMediaStream(label));
@@ -460,6 +731,14 @@
     EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
   }
 
+  void CreateAndSetRemoteOffer(const std::string& sdp) {
+    SessionDescriptionInterface* remote_offer =
+        webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
+                                         sdp, nullptr);
+    EXPECT_TRUE(DoSetRemoteDescription(remote_offer));
+    EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_);
+  }
+
   void CreateAnswerAsLocalDescription() {
     scoped_ptr<SessionDescriptionInterface> answer;
     ASSERT_TRUE(DoCreateAnswer(answer.use()));
@@ -523,25 +802,25 @@
     EXPECT_TRUE_WAIT(observer_.ice_complete_, kTimeout);
   }
 
-  void CreateAnswerAsRemoteDescription(const std::string& offer) {
+  void CreateAnswerAsRemoteDescription(const std::string& sdp) {
     webrtc::JsepSessionDescription* answer = new webrtc::JsepSessionDescription(
         SessionDescriptionInterface::kAnswer);
-    EXPECT_TRUE(answer->Initialize(offer, NULL));
+    EXPECT_TRUE(answer->Initialize(sdp, NULL));
     EXPECT_TRUE(DoSetRemoteDescription(answer));
     EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
   }
 
-  void CreatePrAnswerAndAnswerAsRemoteDescription(const std::string& offer) {
+  void CreatePrAnswerAndAnswerAsRemoteDescription(const std::string& sdp) {
     webrtc::JsepSessionDescription* pr_answer =
         new webrtc::JsepSessionDescription(
             SessionDescriptionInterface::kPrAnswer);
-    EXPECT_TRUE(pr_answer->Initialize(offer, NULL));
+    EXPECT_TRUE(pr_answer->Initialize(sdp, NULL));
     EXPECT_TRUE(DoSetRemoteDescription(pr_answer));
     EXPECT_EQ(PeerConnectionInterface::kHaveRemotePrAnswer, observer_.state_);
     webrtc::JsepSessionDescription* answer =
         new webrtc::JsepSessionDescription(
             SessionDescriptionInterface::kAnswer);
-    EXPECT_TRUE(answer->Initialize(offer, NULL));
+    EXPECT_TRUE(answer->Initialize(sdp, NULL));
     EXPECT_TRUE(DoSetRemoteDescription(answer));
     EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_);
   }
@@ -566,10 +845,71 @@
     CreateAnswerAsRemoteDescription(sdp);
   }
 
+  // This function creates a MediaStream with label kStreams[0], containing
+  // |number_of_audio_tracks| audio tracks and |number_of_video_tracks| video
+  // tracks, and the corresponding SessionDescriptionInterface. The
+  // SessionDescriptionInterface is returned in |desc| and the MediaStream is
+  // stored in |reference_collection_|.
+  void CreateSessionDescriptionAndReference(
+      size_t number_of_audio_tracks,
+      size_t number_of_video_tracks,
+      SessionDescriptionInterface** desc) {
+    ASSERT_TRUE(desc != nullptr);
+    ASSERT_LE(number_of_audio_tracks, 2u);
+    ASSERT_LE(number_of_video_tracks, 2u);
+
+    reference_collection_ = StreamCollection::Create();
+    std::string sdp_ms1 = std::string(kSdpStringInit);
+
+    std::string mediastream_label = kStreams[0];
+
+    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
+        webrtc::MediaStream::Create(mediastream_label));
+    reference_collection_->AddStream(stream);
+
+    if (number_of_audio_tracks > 0) {
+      sdp_ms1 += std::string(kSdpStringAudio);
+      sdp_ms1 += std::string(kSdpStringMs1Audio0);
+      AddAudioTrack(kAudioTracks[0], stream);
+    }
+    if (number_of_audio_tracks > 1) {
+      sdp_ms1 += kSdpStringMs1Audio1;
+      AddAudioTrack(kAudioTracks[1], stream);
+    }
+
+    if (number_of_video_tracks > 0) {
+      sdp_ms1 += std::string(kSdpStringVideo);
+      sdp_ms1 += std::string(kSdpStringMs1Video0);
+      AddVideoTrack(kVideoTracks[0], stream);
+    }
+    if (number_of_video_tracks > 1) {
+      sdp_ms1 += kSdpStringMs1Video1;
+      AddVideoTrack(kVideoTracks[1], stream);
+    }
+
+    *desc = webrtc::CreateSessionDescription(
+        SessionDescriptionInterface::kOffer, sdp_ms1, nullptr);
+  }
+
+  void AddAudioTrack(const std::string& track_id,
+                     MediaStreamInterface* stream) {
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+        webrtc::AudioTrack::Create(track_id, nullptr));
+    ASSERT_TRUE(stream->AddTrack(audio_track));
+  }
+
+  void AddVideoTrack(const std::string& track_id,
+                     MediaStreamInterface* stream) {
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+        webrtc::VideoTrack::Create(track_id, nullptr));
+    ASSERT_TRUE(stream->AddTrack(video_track));
+  }
+
   scoped_refptr<FakePortAllocatorFactory> port_allocator_factory_;
   scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory_;
   scoped_refptr<PeerConnectionInterface> pc_;
   MockPeerConnectionObserver observer_;
+  rtc::scoped_refptr<StreamCollection> reference_collection_;
 };
 
 TEST_F(PeerConnectionInterfaceTest,
@@ -579,7 +919,7 @@
 
 TEST_F(PeerConnectionInterfaceTest, AddStreams) {
   CreatePeerConnection();
-  AddStream(kStreamLabel1);
+  AddVideoStream(kStreamLabel1);
   AddVoiceStream(kStreamLabel2);
   ASSERT_EQ(2u, pc_->local_streams()->count());
 
@@ -606,9 +946,54 @@
   EXPECT_EQ(0u, pc_->local_streams()->count());
 }
 
+// Test that the created offer includes streams we added.
+TEST_F(PeerConnectionInterfaceTest, AddedStreamsPresentInOffer) {
+  CreatePeerConnection();
+  AddAudioVideoStream(kStreamLabel1, "audio_track", "video_track");
+  scoped_ptr<SessionDescriptionInterface> offer;
+  ASSERT_TRUE(DoCreateOffer(offer.accept()));
+
+  const cricket::ContentInfo* audio_content =
+      cricket::GetFirstAudioContent(offer->description());
+  const cricket::AudioContentDescription* audio_desc =
+      static_cast<const cricket::AudioContentDescription*>(
+          audio_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(audio_desc->streams(), kStreamLabel1, "audio_track"));
+
+  const cricket::ContentInfo* video_content =
+      cricket::GetFirstVideoContent(offer->description());
+  const cricket::VideoContentDescription* video_desc =
+      static_cast<const cricket::VideoContentDescription*>(
+          video_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(video_desc->streams(), kStreamLabel1, "video_track"));
+
+  // Add another stream and ensure the offer includes both the old and new
+  // streams.
+  AddAudioVideoStream(kStreamLabel2, "audio_track2", "video_track2");
+  ASSERT_TRUE(DoCreateOffer(offer.accept()));
+
+  audio_content = cricket::GetFirstAudioContent(offer->description());
+  audio_desc = static_cast<const cricket::AudioContentDescription*>(
+      audio_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(audio_desc->streams(), kStreamLabel1, "audio_track"));
+  EXPECT_TRUE(
+      ContainsTrack(audio_desc->streams(), kStreamLabel2, "audio_track2"));
+
+  video_content = cricket::GetFirstVideoContent(offer->description());
+  video_desc = static_cast<const cricket::VideoContentDescription*>(
+      video_content->description);
+  EXPECT_TRUE(
+      ContainsTrack(video_desc->streams(), kStreamLabel1, "video_track"));
+  EXPECT_TRUE(
+      ContainsTrack(video_desc->streams(), kStreamLabel2, "video_track2"));
+}
+
 TEST_F(PeerConnectionInterfaceTest, RemoveStream) {
   CreatePeerConnection();
-  AddStream(kStreamLabel1);
+  AddVideoStream(kStreamLabel1);
   ASSERT_EQ(1u, pc_->local_streams()->count());
   pc_->RemoveStream(pc_->local_streams()->at(0));
   EXPECT_EQ(0u, pc_->local_streams()->count());
@@ -622,7 +1007,7 @@
 
 TEST_F(PeerConnectionInterfaceTest, CreateOfferReceivePrAnswerAndAnswer) {
   CreatePeerConnection();
-  AddStream(kStreamLabel1);
+  AddVideoStream(kStreamLabel1);
   CreateOfferAsLocalDescription();
   std::string offer;
   EXPECT_TRUE(pc_->local_description()->ToString(&offer));
@@ -632,7 +1017,7 @@
 
 TEST_F(PeerConnectionInterfaceTest, ReceiveOfferCreateAnswer) {
   CreatePeerConnection();
-  AddStream(kStreamLabel1);
+  AddVideoStream(kStreamLabel1);
 
   CreateOfferAsRemoteDescription();
   CreateAnswerAsLocalDescription();
@@ -642,7 +1027,7 @@
 
 TEST_F(PeerConnectionInterfaceTest, ReceiveOfferCreatePrAnswerAndAnswer) {
   CreatePeerConnection();
-  AddStream(kStreamLabel1);
+  AddVideoStream(kStreamLabel1);
 
   CreateOfferAsRemoteDescription();
   CreatePrAnswerAsLocalDescription();
@@ -657,7 +1042,7 @@
   pc_->RemoveStream(pc_->local_streams()->at(0));
   CreateOfferReceiveAnswer();
   EXPECT_EQ(0u, pc_->remote_streams()->count());
-  AddStream(kStreamLabel1);
+  AddVideoStream(kStreamLabel1);
   CreateOfferReceiveAnswer();
 }
 
@@ -682,7 +1067,7 @@
   EXPECT_FALSE(pc_->AddIceCandidate(observer_.last_candidate_.get()));
   // SetRemoteDescription takes ownership of offer.
   SessionDescriptionInterface* offer = NULL;
-  AddStream(kStreamLabel1);
+  AddVideoStream(kStreamLabel1);
   EXPECT_TRUE(DoCreateOffer(&offer));
   EXPECT_TRUE(DoSetRemoteDescription(offer));
 
@@ -697,7 +1082,7 @@
   EXPECT_TRUE(pc_->AddIceCandidate(observer_.last_candidate_.get()));
 }
 
-// Test that the CreateOffer and CreatAnswer will fail if the track labels are
+// Test that CreateOffer and CreateAnswer will fail if the track labels are
 // not unique.
 TEST_F(PeerConnectionInterfaceTest, CreateOfferAnswerWithInvalidStream) {
   CreatePeerConnection();
@@ -947,6 +1332,22 @@
   EXPECT_TRUE(channel == NULL);
 }
 
+// Verifies that a duplicated label is not allowed for RTP data channels.
+TEST_F(PeerConnectionInterfaceTest, RtpDuplicatedLabelNotAllowed) {
+  FakeConstraints constraints;
+  constraints.SetAllowRtpDataChannels();
+  CreatePeerConnection(&constraints);
+
+  std::string label = "test";
+  scoped_refptr<DataChannelInterface> channel =
+      pc_->CreateDataChannel(label, nullptr);
+  EXPECT_NE(channel, nullptr);
+
+  scoped_refptr<DataChannelInterface> dup_channel =
+      pc_->CreateDataChannel(label, nullptr);
+  EXPECT_EQ(dup_channel, nullptr);
+}
+
 // This tests that a SCTP data channel is returned using different
 // DataChannelInit configurations.
 TEST_F(PeerConnectionInterfaceTest, CreateSctpDataChannel) {
@@ -1031,6 +1432,23 @@
   EXPECT_TRUE(channel == NULL);
 }
 
+// Verifies that a duplicated label is allowed for SCTP data channels.
+TEST_F(PeerConnectionInterfaceTest, SctpDuplicatedLabelAllowed) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+
+  std::string label = "test";
+  scoped_refptr<DataChannelInterface> channel =
+      pc_->CreateDataChannel(label, nullptr);
+  EXPECT_NE(channel, nullptr);
+
+  scoped_refptr<DataChannelInterface> dup_channel =
+      pc_->CreateDataChannel(label, nullptr);
+  EXPECT_NE(dup_channel, nullptr);
+}
+
 // This test verifies that OnRenegotiationNeeded is fired for every new RTP
 // DataChannel.
 TEST_F(PeerConnectionInterfaceTest, RenegotiationNeededForNewRtpDataChannel) {
@@ -1234,3 +1652,567 @@
   pc_->Close();
   DoGetStats(NULL);
 }
+
+// NOTE: The series of tests below come from what used to be
+// mediastreamsignaling_unittest.cc, and are mostly aimed at testing that
+// setting a remote or local description has the expected effects.
+
+// This test verifies that the remote MediaStreams corresponding to a received
+// SDP string are created. In this test, two separate MediaStreams are
+// signaled.
+TEST_F(PeerConnectionInterfaceTest, UpdateRemoteStreams) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+
+  rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1));
+  EXPECT_TRUE(
+      CompareStreamCollections(observer_.remote_streams(), reference.get()));
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != nullptr);
+
+  // Create a session description based on another SDP with another
+  // MediaStream.
+  CreateAndSetRemoteOffer(kSdpStringWithStream1And2);
+
+  rtc::scoped_refptr<StreamCollection> reference2(CreateStreamCollection(2));
+  EXPECT_TRUE(
+      CompareStreamCollections(observer_.remote_streams(), reference2.get()));
+}
+
+// This test verifies that when remote tracks are added/removed from SDP, the
+// created remote streams are updated appropriately.
+TEST_F(PeerConnectionInterfaceTest,
+       AddRemoveTrackFromExistingRemoteMediaStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1;
+  CreateSessionDescriptionAndReference(1, 1, desc_ms1.accept());
+  EXPECT_TRUE(DoSetRemoteDescription(desc_ms1.release()));
+  EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+                                       reference_collection_));
+
+  // Add extra audio and video tracks to the same MediaStream.
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks;
+  CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.accept());
+  EXPECT_TRUE(DoSetRemoteDescription(desc_ms1_two_tracks.release()));
+  EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+                                       reference_collection_));
+
+  // Remove the extra audio and video tracks.
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_ms2;
+  CreateSessionDescriptionAndReference(1, 1, desc_ms2.accept());
+  EXPECT_TRUE(DoSetRemoteDescription(desc_ms2.release()));
+  EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
+                                       reference_collection_));
+}
+
+// This tests that remote tracks are ended if a local session description is set
+// that rejects the media content type.
+TEST_F(PeerConnectionInterfaceTest, RejectMediaContent) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // First create and set a remote offer, then reject its video content in our
+  // answer.
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
+  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> remote_video =
+      remote_stream->GetVideoTracks()[0];
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state());
+  rtc::scoped_refptr<webrtc::AudioTrackInterface> remote_audio =
+      remote_stream->GetAudioTracks()[0];
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
+
+  rtc::scoped_ptr<SessionDescriptionInterface> local_answer;
+  EXPECT_TRUE(DoCreateAnswer(local_answer.accept()));
+  cricket::ContentInfo* video_info =
+      local_answer->description()->GetContentByName("video");
+  video_info->rejected = true;
+  EXPECT_TRUE(DoSetLocalDescription(local_answer.release()));
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
+
+  // Now create an offer where we reject both video and audio.
+  rtc::scoped_ptr<SessionDescriptionInterface> local_offer;
+  EXPECT_TRUE(DoCreateOffer(local_offer.accept()));
+  video_info = local_offer->description()->GetContentByName("video");
+  ASSERT_TRUE(video_info != nullptr);
+  video_info->rejected = true;
+  cricket::ContentInfo* audio_info =
+      local_offer->description()->GetContentByName("audio");
+  ASSERT_TRUE(audio_info != nullptr);
+  audio_info->rejected = true;
+  EXPECT_TRUE(DoSetLocalDescription(local_offer.release()));
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
+  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state());
+}
+
+// This tests that we won't crash if the remote track has been removed outside
+// of PeerConnection and then PeerConnection tries to reject the track.
+TEST_F(PeerConnectionInterfaceTest, RemoveTrackThenRejectMediaContent) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
+  remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> local_answer(
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kAnswer,
+                                       kSdpStringWithStream1, nullptr));
+  cricket::ContentInfo* video_info =
+      local_answer->description()->GetContentByName("video");
+  video_info->rejected = true;
+  cricket::ContentInfo* audio_info =
+      local_answer->description()->GetContentByName("audio");
+  audio_info->rejected = true;
+  EXPECT_TRUE(DoSetLocalDescription(local_answer.release()));
+
+  // No crash is a pass.
+}
+
+// This tests that a default MediaStream is created if a remote session
+// description doesn't contain any streams and MSID is not supported.
+// It also tests that the default stream is updated if a video m-line is added
+// in a subsequent session description.
+TEST_F(PeerConnectionInterfaceTest, SdpWithoutMsidCreatesDefaultStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreamsAudioOnly);
+
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+
+  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_EQ(0u, remote_stream->GetVideoTracks().size());
+  EXPECT_EQ("default", remote_stream->label());
+
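+  // Update with a remote description that also contains a video m-line; the
+  // same default stream should now gain a default video track.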
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id());
+  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
+  EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id());
+}
+
+// This tests that a default MediaStream is created if a remote session
+// description doesn't contain any streams and the media direction is send only.
+TEST_F(PeerConnectionInterfaceTest,
+       SendOnlySdpWithoutMsidCreatesDefaultStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringSendOnlyWithoutStreams);
+
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+
+  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
+  EXPECT_EQ("default", remote_stream->label());
+}
+
+// This tests that it won't crash when PeerConnection tries to remove
+// a remote track that has already been removed from the MediaStream.
+TEST_F(PeerConnectionInterfaceTest, RemoveAlreadyGoneRemoteStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
+  remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
+
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+
+  // No crash is a pass.
+}
+
+// This tests that a default MediaStream is created if the remote session
+// description doesn't contain any streams and gives no indication of whether
+// MSID is supported.
+TEST_F(PeerConnectionInterfaceTest,
+       SdpWithoutMsidAndStreamsCreatesDefaultStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+
+  ASSERT_EQ(1u, observer_.remote_streams()->count());
+  MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
+  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
+  EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
+}
+
+// This tests that a default MediaStream is not created if the remote session
+// description doesn't contain any streams but does support MSID.
+TEST_F(PeerConnectionInterfaceTest, SdpWithMsidDontCreatesDefaultStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithMsidWithoutStreams);
+  EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that a default MediaStream is not created if a remote session
+// description is updated to not have any MediaStreams.
+TEST_F(PeerConnectionInterfaceTest, VerifyDefaultStreamIsNotCreated) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  CreateAndSetRemoteOffer(kSdpStringWithStream1);
+  rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1));
+  EXPECT_TRUE(
+      CompareStreamCollections(observer_.remote_streams(), reference.get()));
+
+  CreateAndSetRemoteOffer(kSdpStringWithoutStreams);
+  EXPECT_EQ(0u, observer_.remote_streams()->count());
+}
+
+// This tests that an RtpSender is created when the local description is set
+// after adding a local stream.
+// TODO(deadbeef): This test and the one below it need to be updated when
+// an RtpSender's lifetime isn't determined by when a local description is set.
+TEST_F(PeerConnectionInterfaceTest, LocalDescriptionChanged) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // Create an offer just to ensure we have an identity before we manually
+  // call SetLocalDescription.
+  rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+  ASSERT_TRUE(DoCreateOffer(throwaway.accept()));
+
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_1;
+  CreateSessionDescriptionAndReference(2, 2, desc_1.accept());
+
+  pc_->AddStream(reference_collection_->at(0));
+  EXPECT_TRUE(DoSetLocalDescription(desc_1.release()));
+  auto senders = pc_->GetSenders();
+  EXPECT_EQ(4u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[1]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
+
+  // Remove an audio and video track.
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_2;
+  CreateSessionDescriptionAndReference(1, 1, desc_2.accept());
+  EXPECT_TRUE(DoSetLocalDescription(desc_2.release()));
+  senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+  EXPECT_FALSE(ContainsSender(senders, kAudioTracks[1]));
+  EXPECT_FALSE(ContainsSender(senders, kVideoTracks[1]));
+}
+
+// This tests that an RtpSender is created when the local description is set
+// before adding a local stream.
+TEST_F(PeerConnectionInterfaceTest,
+       AddLocalStreamAfterLocalDescriptionChanged) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // Create an offer just to ensure we have an identity before we manually
+  // call SetLocalDescription.
+  rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+  ASSERT_TRUE(DoCreateOffer(throwaway.accept()));
+
+  rtc::scoped_ptr<SessionDescriptionInterface> desc_1;
+  CreateSessionDescriptionAndReference(2, 2, desc_1.accept());
+
+  EXPECT_TRUE(DoSetLocalDescription(desc_1.release()));
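+  // The stream referenced by |desc_1| hasn't been added to the
+  // PeerConnection yet, so no senders should have been created.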
+  auto senders = pc_->GetSenders();
+  EXPECT_EQ(0u, senders.size());
+
+  pc_->AddStream(reference_collection_->at(0));
+  senders = pc_->GetSenders();
+  EXPECT_EQ(4u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[1]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
+}
+
+// This tests that the expected behavior occurs if the SSRC on a local track is
+// changed when SetLocalDescription is called.
+TEST_F(PeerConnectionInterfaceTest,
+       ChangeSsrcOnTrackInLocalSessionDescription) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // Create an offer just to ensure we have an identity before we manually
+  // call SetLocalDescription.
+  rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+  ASSERT_TRUE(DoCreateOffer(throwaway.accept()));
+
+  rtc::scoped_ptr<SessionDescriptionInterface> desc;
+  CreateSessionDescriptionAndReference(1, 1, desc.accept());
+  std::string sdp;
+  desc->ToString(&sdp);
+
+  pc_->AddStream(reference_collection_->at(0));
+  EXPECT_TRUE(DoSetLocalDescription(desc.release()));
+  auto senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+
+  // Change the SSRCs of the audio and video tracks.
+  std::string ssrc_org = "a=ssrc:1";
+  std::string ssrc_to = "a=ssrc:97";
+  rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), ssrc_to.c_str(),
+                       ssrc_to.length(), &sdp);
+  ssrc_org = "a=ssrc:2";
+  ssrc_to = "a=ssrc:98";
+  rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), ssrc_to.c_str(),
+                       ssrc_to.length(), &sdp);
+  rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, sdp,
+                                       nullptr));
+
+  EXPECT_TRUE(DoSetLocalDescription(updated_desc.release()));
+  senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+  // TODO(deadbeef): Once RtpSenders expose parameters, check that the SSRC
+  // changed.
+}
+
+// This tests that the expected behavior occurs if a new session description is
+// set with the same tracks, but on a different MediaStream.
+TEST_F(PeerConnectionInterfaceTest, SignalSameTracksInSeparateMediaStream) {
+  FakeConstraints constraints;
+  constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
+                           true);
+  CreatePeerConnection(&constraints);
+  // Create an offer just to ensure we have an identity before we manually
+  // call SetLocalDescription.
+  rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
+  ASSERT_TRUE(DoCreateOffer(throwaway.accept()));
+
+  rtc::scoped_ptr<SessionDescriptionInterface> desc;
+  CreateSessionDescriptionAndReference(1, 1, desc.accept());
+  std::string sdp;
+  desc->ToString(&sdp);
+
+  pc_->AddStream(reference_collection_->at(0));
+  EXPECT_TRUE(DoSetLocalDescription(desc.release()));
+  auto senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+
+  // Add a new MediaStream but with the same tracks as in the first stream.
+  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream_1(
+      webrtc::MediaStream::Create(kStreams[1]));
+  stream_1->AddTrack(reference_collection_->at(0)->GetVideoTracks()[0]);
+  stream_1->AddTrack(reference_collection_->at(0)->GetAudioTracks()[0]);
+  pc_->AddStream(stream_1);
+
+  // Replace msid in the original SDP.
+  rtc::replace_substrs(kStreams[0], strlen(kStreams[0]), kStreams[1],
+                       strlen(kStreams[1]), &sdp);
+
+  rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
+      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, sdp,
+                                       nullptr));
+
+  EXPECT_TRUE(DoSetLocalDescription(updated_desc.release()));
+  senders = pc_->GetSenders();
+  EXPECT_EQ(2u, senders.size());
+  EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
+  EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+}
+
+// The following tests verify that session options are created correctly.
+
+TEST(CreateSessionOptionsTest, GetOptionsForOfferWithInvalidAudioOption) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = RTCOfferAnswerOptions::kUndefined - 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+
+  rtc_options.offer_to_receive_audio =
+      RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
+  EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+}
+
+TEST(CreateSessionOptionsTest, GetOptionsForOfferWithInvalidVideoOption) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_video = RTCOfferAnswerOptions::kUndefined - 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+
+  rtc_options.offer_to_receive_video =
+      RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1;
+  EXPECT_FALSE(ConvertRtcOptionsForOffer(rtc_options, &options));
+}
+
+// Test that a MediaSessionOptions is created for an offer if
+// OfferToReceiveAudio and OfferToReceiveVideo options are set but no
+// MediaStreams are sent.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithAudioVideo) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = 1;
+  rtc_options.offer_to_receive_video = 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.has_audio());
+  EXPECT_TRUE(options.has_video());
+  EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// OfferToReceiveAudio is set but no MediaStreams are sent.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithAudio) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.has_audio());
+  EXPECT_FALSE(options.has_video());
+  EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// the default RTCOfferAnswerOptions is used and no MediaStreams are sent.
+TEST(CreateSessionOptionsTest, GetDefaultMediaSessionOptionsForOffer) {
+  RTCOfferAnswerOptions rtc_options;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_FALSE(options.has_audio());
+  EXPECT_FALSE(options.has_video());
+  EXPECT_FALSE(options.bundle_enabled);
+  EXPECT_TRUE(options.vad_enabled);
+  EXPECT_FALSE(options.transport_options.ice_restart);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// OfferToReceiveVideo is set but no MediaStreams are sent.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithVideo) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = 0;
+  rtc_options.offer_to_receive_video = 1;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_FALSE(options.has_audio());
+  EXPECT_TRUE(options.has_video());
+  EXPECT_TRUE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created for an offer if
+// UseRtpMux is set to false.
+TEST(CreateSessionOptionsTest,
+     GetMediaSessionOptionsForOfferWithBundleDisabled) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.offer_to_receive_audio = 1;
+  rtc_options.offer_to_receive_video = 1;
+  rtc_options.use_rtp_mux = false;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.has_audio());
+  EXPECT_TRUE(options.has_video());
+  EXPECT_FALSE(options.bundle_enabled);
+}
+
+// Test that a correct MediaSessionOptions is created to restart ice if
+// IceRestart is set. It also tests that subsequent MediaSessionOptions don't
+// have |transport_options.ice_restart| set.
+TEST(CreateSessionOptionsTest, GetMediaSessionOptionsForOfferWithIceRestart) {
+  RTCOfferAnswerOptions rtc_options;
+  rtc_options.ice_restart = true;
+
+  cricket::MediaSessionOptions options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_TRUE(options.transport_options.ice_restart);
+
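+  // A default-constructed RTCOfferAnswerOptions must not carry the ICE
+  // restart flag forward from the previous options.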
+  rtc_options = RTCOfferAnswerOptions();
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_options, &options));
+  EXPECT_FALSE(options.transport_options.ice_restart);
+}
+
+// Test that the MediaConstraints in an answer don't affect whether audio and
+// video are offered in an offer, but that if the kOfferToReceiveAudio or
+// kOfferToReceiveVideo constraints are true in an offer, the media types will
+// be included in subsequent answers.
+TEST(CreateSessionOptionsTest, MediaConstraintsInAnswer) {
+  FakeConstraints answer_c;
+  answer_c.SetMandatoryReceiveAudio(true);
+  answer_c.SetMandatoryReceiveVideo(true);
+
+  cricket::MediaSessionOptions answer_options;
+  EXPECT_TRUE(ParseConstraintsForAnswer(&answer_c, &answer_options));
+  EXPECT_TRUE(answer_options.has_audio());
+  EXPECT_TRUE(answer_options.has_video());
+
+  RTCOfferAnswerOptions rtc_offer_options;
+
+  cricket::MediaSessionOptions offer_options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(rtc_offer_options, &offer_options));
+  EXPECT_FALSE(offer_options.has_audio());
+  EXPECT_FALSE(offer_options.has_video());
+
+  RTCOfferAnswerOptions updated_rtc_offer_options;
+  updated_rtc_offer_options.offer_to_receive_audio = 1;
+  updated_rtc_offer_options.offer_to_receive_video = 1;
+
+  cricket::MediaSessionOptions updated_offer_options;
+  EXPECT_TRUE(ConvertRtcOptionsForOffer(updated_rtc_offer_options,
+                                        &updated_offer_options));
+  EXPECT_TRUE(updated_offer_options.has_audio());
+  EXPECT_TRUE(updated_offer_options.has_video());
+
+  // Since an offer has been created with both audio and video, subsequent
+  // offers and answers should contain both audio and video.
+  // Answers will only contain the media types that exist in the offer
+  // regardless of the value of |updated_answer_options.has_audio| and
+  // |updated_answer_options.has_video|.
+  FakeConstraints updated_answer_c;
+  updated_answer_c.SetMandatoryReceiveAudio(false);
+  updated_answer_c.SetMandatoryReceiveVideo(false);
+
+  cricket::MediaSessionOptions updated_answer_options;
+  EXPECT_TRUE(
+      ParseConstraintsForAnswer(&updated_answer_c, &updated_answer_options));
+  EXPECT_TRUE(updated_answer_options.has_audio());
+  EXPECT_TRUE(updated_answer_options.has_video());
+
+  RTCOfferAnswerOptions default_rtc_options;
+  EXPECT_TRUE(
+      ConvertRtcOptionsForOffer(default_rtc_options, &updated_offer_options));
+  // By default, |has_audio| and |has_video| are false if there is no media
+  // track.
+  EXPECT_FALSE(updated_offer_options.has_audio());
+  EXPECT_FALSE(updated_offer_options.has_video());
+}
diff --git a/talk/app/webrtc/sctputils.cc b/talk/app/webrtc/sctputils.cc
index a643837..2239599 100644
--- a/talk/app/webrtc/sctputils.cc
+++ b/talk/app/webrtc/sctputils.cc
@@ -48,6 +48,19 @@
   DCOMCT_UNORDERED_PARTIAL_TIME = 0x82,
 };
 
+bool IsOpenMessage(const rtc::Buffer& payload) {
+  // Format defined at
+  // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
+
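+  // The first byte of every data channel control message is the message type;
+  // DATA_CHANNEL_OPEN is 0x03.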
+  rtc::ByteBuffer buffer(payload);
+  uint8_t message_type;
+  if (!buffer.ReadUInt8(&message_type)) {
+    LOG(LS_WARNING) << "Could not read OPEN message type.";
+    return false;
+  }
+  return message_type == DATA_CHANNEL_OPEN_MESSAGE_TYPE;
+}
+
 bool ParseDataChannelOpenMessage(const rtc::Buffer& payload,
                                  std::string* label,
                                  DataChannelInit* config) {
diff --git a/talk/app/webrtc/sctputils.h b/talk/app/webrtc/sctputils.h
index ab1818b..f16873c 100644
--- a/talk/app/webrtc/sctputils.h
+++ b/talk/app/webrtc/sctputils.h
@@ -39,6 +39,9 @@
 namespace webrtc {
 struct DataChannelInit;
 
+// Read the message type and return true if it's an OPEN message.
+bool IsOpenMessage(const rtc::Buffer& payload);
+
 bool ParseDataChannelOpenMessage(const rtc::Buffer& payload,
                                  std::string* label,
                                  DataChannelInit* config);
diff --git a/talk/app/webrtc/sctputils_unittest.cc b/talk/app/webrtc/sctputils_unittest.cc
index e5f323a..e0e203f 100644
--- a/talk/app/webrtc/sctputils_unittest.cc
+++ b/talk/app/webrtc/sctputils_unittest.cc
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2013 Google Inc
+ * Copyright 2013 Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -159,3 +159,20 @@
 
   EXPECT_TRUE(webrtc::ParseDataChannelOpenAckMessage(packet));
 }
+
+TEST_F(SctpUtilsTest, TestIsOpenMessage) {
+  rtc::ByteBuffer open;
+  open.WriteUInt8(0x03);
+  EXPECT_TRUE(webrtc::IsOpenMessage(open));
+
+  rtc::ByteBuffer openAck;
+  openAck.WriteUInt8(0x02);
+  EXPECT_FALSE(webrtc::IsOpenMessage(openAck));
+
+  rtc::ByteBuffer invalid;
+  invalid.WriteUInt8(0x01);
+  EXPECT_FALSE(webrtc::IsOpenMessage(invalid));
+
+  rtc::ByteBuffer empty;
+  EXPECT_FALSE(webrtc::IsOpenMessage(empty));
+}
diff --git a/talk/app/webrtc/statscollector.cc b/talk/app/webrtc/statscollector.cc
index 5b527ec..9ba7da8 100644
--- a/talk/app/webrtc/statscollector.cc
+++ b/talk/app/webrtc/statscollector.cc
@@ -30,6 +30,7 @@
 #include <utility>
 #include <vector>
 
+#include "talk/app/webrtc/peerconnection.h"
 #include "talk/session/media/channel.h"
 #include "webrtc/base/base64.h"
 #include "webrtc/base/checks.h"
@@ -356,14 +357,13 @@
   }
 }
 
-StatsCollector::StatsCollector(WebRtcSession* session)
-    : session_(session),
-      stats_gathering_started_(0) {
-  RTC_DCHECK(session_);
+StatsCollector::StatsCollector(PeerConnection* pc)
+    : pc_(pc), stats_gathering_started_(0) {
+  RTC_DCHECK(pc_);
 }
 
 StatsCollector::~StatsCollector() {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
 }
 
 double StatsCollector::GetTimeNow() {
@@ -373,7 +373,7 @@
 // Adds a MediaStream with tracks that can be used as a |selector| in a call
 // to GetStats.
 void StatsCollector::AddStream(MediaStreamInterface* stream) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   RTC_DCHECK(stream != NULL);
 
   CreateTrackReports<AudioTrackVector>(stream->GetAudioTracks(),
@@ -384,7 +384,7 @@
 
 void StatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track,
                                         uint32_t ssrc) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   RTC_DCHECK(audio_track != NULL);
 #if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
   for (const auto& track : local_audio_tracks_)
@@ -416,7 +416,7 @@
 
 void StatsCollector::GetStats(MediaStreamTrackInterface* track,
                               StatsReports* reports) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   RTC_DCHECK(reports != NULL);
   RTC_DCHECK(reports->empty());
 
@@ -430,7 +430,7 @@
   }
 
   StatsReport* report = reports_.Find(StatsReport::NewTypedId(
-      StatsReport::kStatsReportTypeSession, session_->id()));
+      StatsReport::kStatsReportTypeSession, pc_->session()->id()));
   if (report)
     reports->push_back(report);
 
@@ -456,7 +456,7 @@
 
 void
 StatsCollector::UpdateStats(PeerConnectionInterface::StatsOutputLevel level) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   double time_now = GetTimeNow();
   // Calls to UpdateStats() that occur less than kMinGatherStatsPeriod number of
   // ms apart will be ignored.
@@ -467,7 +467,7 @@
   }
   stats_gathering_started_ = time_now;
 
-  if (session_) {
+  if (pc_->session()) {
     // TODO(tommi): All of these hop over to the worker thread to fetch
     // information.  We could use an AsyncInvoker to run all of these and post
     // the information back to the signaling thread where we can create and
@@ -482,11 +482,12 @@
   }
 }
 
-StatsReport* StatsCollector::PrepareReport(bool local,
-                                           uint32_t ssrc,
-                                           const StatsReport::Id& transport_id,
-                                           StatsReport::Direction direction) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+StatsReport* StatsCollector::PrepareReport(
+    bool local,
+    uint32_t ssrc,
+    const StatsReport::Id& transport_id,
+    StatsReport::Direction direction) {
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   StatsReport::Id id(StatsReport::NewIdWithDirection(
       local ? StatsReport::kStatsReportTypeSsrc
             : StatsReport::kStatsReportTypeRemoteSsrc,
@@ -525,7 +526,7 @@
 
 StatsReport* StatsCollector::AddOneCertificateReport(
     const rtc::SSLCertificate* cert, const StatsReport* issuer) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
 
   // TODO(bemasc): Move this computation to a helper class that caches these
   // values to reduce CPU use in GetStats.  This will require adding a fast
@@ -568,7 +569,7 @@
 
 StatsReport* StatsCollector::AddCertificateReports(
     const rtc::SSLCertificate* cert) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   // Produces a chain of StatsReports representing this certificate and the rest
   // of its chain, and adds those reports to |reports_|.  The return value is
   // the id of the leaf report.  The provided cert must be non-null, so at least
@@ -668,18 +669,18 @@
 }
 
 void StatsCollector::ExtractSessionInfo() {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
 
   // Extract information from the base session.
   StatsReport::Id id(StatsReport::NewTypedId(
-      StatsReport::kStatsReportTypeSession, session_->id()));
+      StatsReport::kStatsReportTypeSession, pc_->session()->id()));
   StatsReport* report = reports_.ReplaceOrAddNew(id);
   report->set_timestamp(stats_gathering_started_);
   report->AddBoolean(StatsReport::kStatsValueNameInitiator,
-                     session_->initiator());
+                     pc_->session()->initiator());
 
   cricket::SessionStats stats;
-  if (!session_->GetTransportStats(&stats)) {
+  if (!pc_->session()->GetTransportStats(&stats)) {
     return;
   }
 
@@ -698,16 +699,16 @@
     //
     StatsReport::Id local_cert_report_id, remote_cert_report_id;
     rtc::scoped_refptr<rtc::RTCCertificate> certificate;
-    if (session_->GetLocalCertificate(transport_iter.second.transport_name,
-                                      &certificate)) {
+    if (pc_->session()->GetLocalCertificate(
+            transport_iter.second.transport_name, &certificate)) {
       StatsReport* r = AddCertificateReports(&(certificate->ssl_certificate()));
       if (r)
         local_cert_report_id = r->id();
     }
 
     rtc::scoped_ptr<rtc::SSLCertificate> cert;
-    if (session_->GetRemoteSSLCertificate(transport_iter.second.transport_name,
-                                          cert.accept())) {
+    if (pc_->session()->GetRemoteSSLCertificate(
+            transport_iter.second.transport_name, cert.accept())) {
       StatsReport* r = AddCertificateReports(cert.get());
       if (r)
         remote_cert_report_id = r->id();
@@ -758,13 +759,13 @@
 }
 
 void StatsCollector::ExtractVoiceInfo() {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
 
-  if (!session_->voice_channel()) {
+  if (!pc_->session()->voice_channel()) {
     return;
   }
   cricket::VoiceMediaInfo voice_info;
-  if (!session_->voice_channel()->GetStats(&voice_info)) {
+  if (!pc_->session()->voice_channel()->GetStats(&voice_info)) {
     LOG(LS_ERROR) << "Failed to get voice channel stats.";
     return;
   }
@@ -773,11 +774,11 @@
   // results back to the signaling thread, where we can add data to the reports.
   rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
 
-  StatsReport::Id transport_id(GetTransportIdFromProxy(proxy_to_transport_,
-      session_->voice_channel()->content_name()));
+  StatsReport::Id transport_id(GetTransportIdFromProxy(
+      proxy_to_transport_, pc_->session()->voice_channel()->content_name()));
   if (!transport_id.get()) {
     LOG(LS_ERROR) << "Failed to get transport name for proxy "
-                  << session_->voice_channel()->content_name();
+                  << pc_->session()->voice_channel()->content_name();
     return;
   }
 
@@ -791,13 +792,13 @@
 
 void StatsCollector::ExtractVideoInfo(
     PeerConnectionInterface::StatsOutputLevel level) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
 
-  if (!session_->video_channel())
+  if (!pc_->session()->video_channel())
     return;
 
   cricket::VideoMediaInfo video_info;
-  if (!session_->video_channel()->GetStats(&video_info)) {
+  if (!pc_->session()->video_channel()->GetStats(&video_info)) {
     LOG(LS_ERROR) << "Failed to get video channel stats.";
     return;
   }
@@ -806,11 +807,11 @@
   // results back to the signaling thread, where we can add data to the reports.
   rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
 
-  StatsReport::Id transport_id(GetTransportIdFromProxy(proxy_to_transport_,
-      session_->video_channel()->content_name()));
+  StatsReport::Id transport_id(GetTransportIdFromProxy(
+      proxy_to_transport_, pc_->session()->video_channel()->content_name()));
   if (!transport_id.get()) {
     LOG(LS_ERROR) << "Failed to get transport name for proxy "
-                  << session_->video_channel()->content_name();
+                  << pc_->session()->video_channel()->content_name();
     return;
   }
   ExtractStatsFromList(video_info.receivers, transport_id, this,
@@ -828,12 +829,11 @@
 }
 
 void StatsCollector::ExtractDataInfo() {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
 
   rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
 
-  for (const auto& dc :
-           session_->mediastream_signaling()->sctp_data_channels()) {
+  for (const auto& dc : pc_->sctp_data_channels()) {
     StatsReport::Id id(StatsReport::NewTypedIntId(
         StatsReport::kStatsReportTypeDataChannel, dc->id()));
     StatsReport* report = reports_.ReplaceOrAddNew(id);
@@ -849,14 +849,14 @@
 StatsReport* StatsCollector::GetReport(const StatsReport::StatsType& type,
                                        const std::string& id,
                                        StatsReport::Direction direction) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   RTC_DCHECK(type == StatsReport::kStatsReportTypeSsrc ||
              type == StatsReport::kStatsReportTypeRemoteSsrc);
   return reports_.Find(StatsReport::NewIdWithDirection(type, id, direction));
 }
 
 void StatsCollector::UpdateStatsFromExistingLocalAudioTracks() {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   // Loop through the existing local audio tracks.
   for (const auto& it : local_audio_tracks_) {
     AudioTrackInterface* track = it.first;
@@ -884,7 +884,7 @@
 
 void StatsCollector::UpdateReportFromAudioTrack(AudioTrackInterface* track,
                                                 StatsReport* report) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   RTC_DCHECK(track != NULL);
 
   int signal_level = 0;
@@ -907,16 +907,16 @@
 bool StatsCollector::GetTrackIdBySsrc(uint32_t ssrc,
                                       std::string* track_id,
                                       StatsReport::Direction direction) {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
   if (direction == StatsReport::kSend) {
-    if (!session_->GetLocalTrackIdBySsrc(ssrc, track_id)) {
+    if (!pc_->session()->GetLocalTrackIdBySsrc(ssrc, track_id)) {
       LOG(LS_WARNING) << "The SSRC " << ssrc
                       << " is not associated with a sending track";
       return false;
     }
   } else {
     RTC_DCHECK(direction == StatsReport::kReceive);
-    if (!session_->GetRemoteTrackIdBySsrc(ssrc, track_id)) {
+    if (!pc_->session()->GetRemoteTrackIdBySsrc(ssrc, track_id)) {
       LOG(LS_WARNING) << "The SSRC " << ssrc
                       << " is not associated with a receiving track";
       return false;
@@ -927,7 +927,7 @@
 }
 
 void StatsCollector::UpdateTrackReports() {
-  RTC_DCHECK(session_->signaling_thread()->IsCurrent());
+  RTC_DCHECK(pc_->session()->signaling_thread()->IsCurrent());
 
   rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
 
diff --git a/talk/app/webrtc/statscollector.h b/talk/app/webrtc/statscollector.h
index add26c6..714f15a 100644
--- a/talk/app/webrtc/statscollector.h
+++ b/talk/app/webrtc/statscollector.h
@@ -43,6 +43,8 @@
 
 namespace webrtc {
 
+class PeerConnection;
+
 // Conversion function to convert candidate type string to the corresponding one
 // from  enum RTCStatsIceCandidateType.
 const char* IceCandidateTypeToStatsType(const std::string& candidate_type);
@@ -57,9 +59,9 @@
 
 class StatsCollector {
  public:
-  // The caller is responsible for ensuring that the session outlives the
+  // The caller is responsible for ensuring that the pc outlives the
   // StatsCollector instance.
-  explicit StatsCollector(WebRtcSession* session);
+  explicit StatsCollector(PeerConnection* pc);
   virtual ~StatsCollector();
 
   // Adds a MediaStream with tracks that can be used as a |selector| in a call
@@ -151,8 +153,8 @@
   // A collection for all of our stats reports.
   StatsCollection reports_;
   TrackIdMap track_ids_;
-  // Raw pointer to the session the statistics are gathered from.
-  WebRtcSession* const session_;
+  // Raw pointer to the peer connection the statistics are gathered from.
+  PeerConnection* const pc_;
   double stats_gathering_started_;
   cricket::ProxyTransportMap proxy_to_transport_;
 
diff --git a/talk/app/webrtc/statscollector_unittest.cc b/talk/app/webrtc/statscollector_unittest.cc
index 21f9df8..49b992c 100644
--- a/talk/app/webrtc/statscollector_unittest.cc
+++ b/talk/app/webrtc/statscollector_unittest.cc
@@ -31,12 +31,13 @@
 
 #include "talk/app/webrtc/statscollector.h"
 
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/peerconnectionfactory.h"
 #include "talk/app/webrtc/mediastream.h"
 #include "talk/app/webrtc/mediastreaminterface.h"
 #include "talk/app/webrtc/mediastreamsignaling.h"
 #include "talk/app/webrtc/mediastreamtrack.h"
 #include "talk/app/webrtc/test/fakedatachannelprovider.h"
-#include "talk/app/webrtc/test/fakemediastreamsignaling.h"
 #include "talk/app/webrtc/videotrack.h"
 #include "talk/media/base/fakemediaengine.h"
 #include "talk/session/media/channelmanager.h"
@@ -54,6 +55,7 @@
 using testing::Field;
 using testing::Return;
 using testing::ReturnNull;
+using testing::ReturnRef;
 using testing::SetArgPointee;
 using webrtc::PeerConnectionInterface;
 using webrtc::StatsReport;
@@ -83,12 +85,12 @@
 class MockWebRtcSession : public webrtc::WebRtcSession {
  public:
   explicit MockWebRtcSession(cricket::ChannelManager* channel_manager)
-    : WebRtcSession(channel_manager, rtc::Thread::Current(),
-                    rtc::Thread::Current(), NULL, NULL) {
-  }
+      : WebRtcSession(channel_manager,
+                      rtc::Thread::Current(),
+                      rtc::Thread::Current(),
+                      nullptr) {}
   MOCK_METHOD0(voice_channel, cricket::VoiceChannel*());
   MOCK_METHOD0(video_channel, cricket::VideoChannel*());
-  MOCK_CONST_METHOD0(mediastream_signaling, const MediaStreamSignaling*());
   // Libjingle uses "local" for a outgoing track, and "remote" for a incoming
   // track.
   MOCK_METHOD2(GetLocalTrackIdBySsrc, bool(uint32_t, std::string*));
@@ -102,6 +104,21 @@
                     rtc::SSLCertificate** cert));
 };
 
+// The factory isn't really used; it just satisfies the base PeerConnection.
+class FakePeerConnectionFactory
+    : public rtc::RefCountedObject<PeerConnectionFactory> {};
+
+class MockPeerConnection
+    : public rtc::RefCountedObject<webrtc::PeerConnection> {
+ public:
+  MockPeerConnection()
+      : rtc::RefCountedObject<webrtc::PeerConnection>(
+            new FakePeerConnectionFactory()) {}
+  MOCK_METHOD0(session, WebRtcSession*());
+  MOCK_CONST_METHOD0(sctp_data_channels,
+                     const std::vector<rtc::scoped_refptr<DataChannel>>&());
+};
+
 class MockVideoMediaChannel : public cricket::FakeVideoMediaChannel {
  public:
   MockVideoMediaChannel() :
@@ -472,9 +489,8 @@
 
 class StatsCollectorForTest : public webrtc::StatsCollector {
  public:
-  explicit StatsCollectorForTest(WebRtcSession* session) :
-      StatsCollector(session), time_now_(19477) {
-  }
+  explicit StatsCollectorForTest(PeerConnection* pc)
+      : StatsCollector(pc), time_now_(19477) {}
 
   double GetTimeNow() override {
     return time_now_;
@@ -487,15 +503,18 @@
 class StatsCollectorTest : public testing::Test {
  protected:
   StatsCollectorTest()
-    : media_engine_(new cricket::FakeMediaEngine()),
-      channel_manager_(
-          new cricket::ChannelManager(media_engine_, rtc::Thread::Current())),
-      session_(channel_manager_.get()),
-      signaling_(channel_manager_.get()) {
+      : media_engine_(new cricket::FakeMediaEngine()),
+        channel_manager_(
+            new cricket::ChannelManager(media_engine_, rtc::Thread::Current())),
+        session_(channel_manager_.get()) {
     // By default, we ignore session GetStats calls.
     EXPECT_CALL(session_, GetTransportStats(_)).WillRepeatedly(Return(false));
-    EXPECT_CALL(session_, mediastream_signaling()).WillRepeatedly(
-        Return(&signaling_));
+    // Add default returns for mock classes.
+    EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+    EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
+    EXPECT_CALL(pc_, session()).WillRepeatedly(Return(&session_));
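+    // Return |data_channels_| by reference so that channels added later via
+    // AddDataChannel() are visible to the StatsCollector under test.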
+    EXPECT_CALL(pc_, sctp_data_channels())
+        .WillRepeatedly(ReturnRef(data_channels_));
   }
 
   ~StatsCollectorTest() {}
@@ -557,6 +576,16 @@
         .WillOnce(DoAll(SetArgPointee<1>(kRemoteTrackId), Return(true)));
   }
 
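+  // Appends a DataChannel to |data_channels_|, which the mocked
+  // PeerConnection::sctp_data_channels() returns.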
+  void AddDataChannel(cricket::DataChannelType type,
+                      const std::string& label,
+                      int id) {
+    InternalDataChannelInit config;
+    config.id = id;
+
+    data_channels_.push_back(
+        DataChannel::Create(&data_channel_provider_, type, label, config));
+  }
+
   StatsReport* AddCandidateReport(StatsCollector* collector,
                                   const cricket::Candidate& candidate,
                                   bool local) {
@@ -644,7 +673,7 @@
                               const std::vector<std::string>& local_ders,
                               const rtc::FakeSSLCertificate& remote_cert,
                               const std::vector<std::string>& remote_ders) {
-    StatsCollectorForTest stats(&session_);
+    StatsCollectorForTest stats(&pc_);
 
     StatsReports reports;  // returned values.
 
@@ -679,8 +708,6 @@
     EXPECT_CALL(session_, GetTransportStats(_))
       .WillOnce(DoAll(SetArgPointee<0>(session_stats),
                       Return(true)));
-    EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
-    EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
 
     stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
 
@@ -734,12 +761,13 @@
   cricket::FakeMediaEngine* media_engine_;
   rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
   MockWebRtcSession session_;
-  FakeMediaStreamSignaling signaling_;
+  MockPeerConnection pc_;
   FakeDataChannelProvider data_channel_provider_;
   cricket::SessionStats session_stats_;
   rtc::scoped_refptr<webrtc::MediaStream> stream_;
   rtc::scoped_refptr<webrtc::VideoTrack> track_;
   rtc::scoped_refptr<FakeAudioTrack> audio_track_;
+  std::vector<rtc::scoped_refptr<DataChannel>> data_channels_;
 };
 
 // Verify that ExtractDataInfo populates reports.
@@ -749,14 +777,8 @@
   const std::string state = DataChannelInterface::DataStateString(
       DataChannelInterface::DataState::kConnecting);
 
-  EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
-  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
-
-  InternalDataChannelInit config;
-  config.id = id;
-  signaling_.AddDataChannel(DataChannel::Create(
-      &data_channel_provider_, cricket::DCT_SCTP, label, config));
-  StatsCollectorForTest stats(&session_);
+  AddDataChannel(cricket::DCT_SCTP, label, id);
+  StatsCollectorForTest stats(&pc_);
 
   stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
 
@@ -788,7 +810,7 @@
 
 // This test verifies that 64-bit counters are passed successfully.
 TEST_F(StatsCollectorTest, BytesCounterHandles64Bits) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -834,7 +856,7 @@
 
 // Test that BWE information is reported via stats.
 TEST_F(StatsCollectorTest, BandwidthEstimationInfoIsReported) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -891,11 +913,9 @@
 // This test verifies that an object of type "googSession" always
 // exists in the returned stats.
 TEST_F(StatsCollectorTest, SessionObjectExists) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   StatsReports reports;  // returned values.
-  EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
-  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
   stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
   stats.GetStats(NULL, &reports);
   const StatsReport* session_report = FindNthReportByType(
@@ -906,11 +926,9 @@
 // This test verifies that only one object of type "googSession" exists
 // in the returned stats.
 TEST_F(StatsCollectorTest, OnlyOneSessionObjectExists) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   StatsReports reports;  // returned values.
-  EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
-  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
   stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
   stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
   stats.GetStats(NULL, &reports);
@@ -925,7 +943,7 @@
 // This test verifies that the empty track report exists in the returned stats
 // without calling StatsCollector::UpdateStats.
 TEST_F(StatsCollectorTest, TrackObjectExistsWithoutUpdateStats) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
   cricket::VideoChannel video_channel(rtc::Thread::Current(),
@@ -950,7 +968,7 @@
 // This test verifies that the empty track report exists in the returned stats
 // when StatsCollector::UpdateStats is called with ssrc stats.
 TEST_F(StatsCollectorTest, TrackAndSsrcObjectExistAfterUpdateSsrcStats) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1018,7 +1036,7 @@
 // This test verifies that an SSRC object has the identifier of a Transport
 // stats object, and that this transport stats object exists in stats.
 TEST_F(StatsCollectorTest, TransportObjectLinkedFromSsrcObject) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1081,7 +1099,7 @@
 // This test verifies that a remote stats object will not be created for
 // an outgoing SSRC where remote stats are not returned.
 TEST_F(StatsCollectorTest, RemoteSsrcInfoIsAbsent) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
   // The transport_name known by the video channel.
@@ -1091,9 +1109,6 @@
   AddOutgoingVideoTrackStats();
   stats.AddStream(stream_);
 
-  EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
-  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
-
   stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
   StatsReports reports;
   stats.GetStats(NULL, &reports);
@@ -1105,7 +1120,7 @@
 // This test verifies that a remote stats object will be created for
 // an outgoing SSRC where stats are returned.
 TEST_F(StatsCollectorTest, RemoteSsrcInfoIsPresent) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1156,7 +1171,7 @@
 // This test verifies that the empty track report exists in the returned stats
 // when StatsCollector::UpdateStats is called with ssrc stats.
 TEST_F(StatsCollectorTest, ReportsFromRemoteTrack) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1214,7 +1229,7 @@
 // This test verifies the Ice Candidate report should contain the correct
 // information from local/remote candidates.
 TEST_F(StatsCollectorTest, IceCandidateReport) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   StatsReports reports;                     // returned values.
 
@@ -1344,7 +1359,7 @@
 // This test verifies that the stats are generated correctly when no
 // transport is present.
 TEST_F(StatsCollectorTest, NoTransport) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1370,9 +1385,6 @@
     .WillOnce(DoAll(SetArgPointee<0>(session_stats),
                     Return(true)));
 
-  EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
-  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
-
   stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
   stats.GetStats(NULL, &reports);
 
@@ -1406,7 +1418,7 @@
 // This test verifies that the stats are generated correctly when the transport
 // does not have any certificates.
 TEST_F(StatsCollectorTest, NoCertificates) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1435,9 +1447,6 @@
   EXPECT_CALL(session_, GetTransportStats(_))
     .WillOnce(DoAll(SetArgPointee<0>(session_stats),
                     Return(true)));
-  EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
-  EXPECT_CALL(session_, voice_channel()).WillRepeatedly(ReturnNull());
-
   stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
   stats.GetStats(NULL, &reports);
 
@@ -1475,7 +1484,7 @@
 // This test verifies that a local stats object can get statistics via
 // AudioTrackInterface::GetStats() method.
 TEST_F(StatsCollectorTest, GetStatsFromLocalAudioTrack) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1510,7 +1519,7 @@
 // This test verifies that audio receive streams populate stats reports
 // correctly.
 TEST_F(StatsCollectorTest, GetStatsFromRemoteStream) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1539,7 +1548,7 @@
 // This test verifies that a local stats object won't update its statistics
 // after a RemoveLocalAudioTrack() call.
 TEST_F(StatsCollectorTest, GetStatsAfterRemoveAudioStream) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1600,7 +1609,7 @@
 // This test verifies that when ongoing and incoming audio tracks are using
 // the same ssrc, they populate stats reports correctly.
 TEST_F(StatsCollectorTest, LocalAndRemoteTracksWithSameSsrc) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
@@ -1687,7 +1696,7 @@
 // TODO(xians): Figure out if it is possible to encapsulate the setup and
 // avoid duplication of code in test cases.
 TEST_F(StatsCollectorTest, TwoLocalTracksWithSameSsrc) {
-  StatsCollectorForTest stats(&session_);
+  StatsCollectorForTest stats(&pc_);
 
   EXPECT_CALL(session_, GetLocalCertificate(_, _))
       .WillRepeatedly(Return(false));
diff --git a/talk/app/webrtc/webrtcsession.cc b/talk/app/webrtc/webrtcsession.cc
index bdc4784..1a5751a 100644
--- a/talk/app/webrtc/webrtcsession.cc
+++ b/talk/app/webrtc/webrtcsession.cc
@@ -38,6 +38,7 @@
 #include "talk/app/webrtc/mediaconstraintsinterface.h"
 #include "talk/app/webrtc/mediastreamsignaling.h"
 #include "talk/app/webrtc/peerconnectioninterface.h"
+#include "talk/app/webrtc/sctputils.h"
 #include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
 #include "talk/media/base/constants.h"
 #include "talk/media/base/videocapturer.h"
@@ -536,12 +537,10 @@
   bool ice_restart_;
 };
 
-WebRtcSession::WebRtcSession(
-    cricket::ChannelManager* channel_manager,
-    rtc::Thread* signaling_thread,
-    rtc::Thread* worker_thread,
-    cricket::PortAllocator* port_allocator,
-    MediaStreamSignaling* mediastream_signaling)
+WebRtcSession::WebRtcSession(cricket::ChannelManager* channel_manager,
+                             rtc::Thread* signaling_thread,
+                             rtc::Thread* worker_thread,
+                             cricket::PortAllocator* port_allocator)
     : cricket::BaseSession(signaling_thread,
                            worker_thread,
                            port_allocator,
@@ -551,7 +550,6 @@
       // o line MUST be representable with a "64 bit signed integer".
       // Due to this constraint session id |sid_| is max limited to LLONG_MAX.
       channel_manager_(channel_manager),
-      mediastream_signaling_(mediastream_signaling),
       ice_observer_(NULL),
       ice_connection_state_(PeerConnectionInterface::kIceConnectionNew),
       ice_connection_receiving_(true),
@@ -643,9 +641,6 @@
       data_channel_type_ = cricket::DCT_SCTP;
     }
   }
-  if (data_channel_type_ != cricket::DCT_NONE) {
-    mediastream_signaling_->SetDataChannelFactory(this);
-  }
 
   // Find DSCP constraint.
   if (FindConstraint(
@@ -743,21 +738,19 @@
   if (!dtls_enabled_) {
     // Construct with DTLS disabled.
     webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
-        signaling_thread(), channel_manager_, mediastream_signaling_, this,
-        id(), data_channel_type_));
+        signaling_thread(), channel_manager_, this, id()));
   } else {
     // Construct with DTLS enabled.
     if (!certificate) {
       // Use the |dtls_identity_store| to generate a certificate.
       RTC_DCHECK(dtls_identity_store);
       webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
-          signaling_thread(), channel_manager_, mediastream_signaling_,
-          dtls_identity_store.Pass(), this, id(), data_channel_type_));
+          signaling_thread(), channel_manager_, dtls_identity_store.Pass(),
+          this, id()));
     } else {
       // Use the already generated certificate.
       webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
-          signaling_thread(), channel_manager_, mediastream_signaling_,
-          certificate, this, id(), data_channel_type_));
+          signaling_thread(), channel_manager_, certificate, this, id()));
     }
   }
 
@@ -819,13 +812,17 @@
 
 void WebRtcSession::CreateOffer(
     CreateSessionDescriptionObserver* observer,
-    const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
-  webrtc_session_desc_factory_->CreateOffer(observer, options);
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+    const cricket::MediaSessionOptions& session_options) {
+  webrtc_session_desc_factory_->CreateOffer(observer, options, session_options);
 }
 
-void WebRtcSession::CreateAnswer(CreateSessionDescriptionObserver* observer,
-                                 const MediaConstraintsInterface* constraints) {
-  webrtc_session_desc_factory_->CreateAnswer(observer, constraints);
+void WebRtcSession::CreateAnswer(
+    CreateSessionDescriptionObserver* observer,
+    const MediaConstraintsInterface* constraints,
+    const cricket::MediaSessionOptions& session_options) {
+  webrtc_session_desc_factory_->CreateAnswer(observer, constraints,
+                                             session_options);
 }
 
 bool WebRtcSession::SetLocalDescription(SessionDescriptionInterface* desc,
@@ -883,14 +880,6 @@
     UseCandidatesInSessionDescription(remote_desc_.get());
   }
 
-  // Update state and SSRC of local MediaStreams and DataChannels based on the
-  // local session description.
-  mediastream_signaling_->OnLocalDescriptionChanged(local_desc_.get());
-
-  rtc::SSLRole role;
-  if (data_channel_type_ == cricket::DCT_SCTP && GetSslRole(&role)) {
-    mediastream_signaling_->OnDtlsRoleReadyForSctp(role);
-  }
   if (error() != cricket::BaseSession::ERROR_NONE) {
     return BadLocalSdp(desc->type(), GetSessionErrorMsg(), err_desc);
   }
@@ -927,8 +916,6 @@
     return false;
   }
 
-  // Update remote MediaStreams.
-  mediastream_signaling_->OnRemoteDescriptionChanged(desc);
   if (local_description() && !UseCandidatesInSessionDescription(desc)) {
     return BadRemoteSdp(desc->type(), kInvalidCandidates, err_desc);
   }
@@ -950,11 +937,6 @@
 
   remote_desc_.reset(desc_temp.release());
 
-  rtc::SSLRole role;
-  if (data_channel_type_ == cricket::DCT_SCTP && GetSslRole(&role)) {
-    mediastream_signaling_->OnDtlsRoleReadyForSctp(role);
-  }
-
   if (error() != cricket::BaseSession::ERROR_NONE) {
     return BadRemoteSdp(desc->type(), GetSessionErrorMsg(), err_desc);
   }
@@ -1387,6 +1369,8 @@
                                                &DataChannel::OnChannelReady);
   data_channel_->SignalDataReceived.connect(webrtc_data_channel,
                                             &DataChannel::OnDataReceived);
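+  // Forward remote stream resets so the DataChannel can close itself when its
+  // SCTP stream is closed by the remote peer.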
+  data_channel_->SignalStreamClosedRemotely.connect(
+      webrtc_data_channel, &DataChannel::OnStreamClosedRemotely);
   return true;
 }
 
@@ -1397,6 +1381,7 @@
   }
   data_channel_->SignalReadyToSendData.disconnect(webrtc_data_channel);
   data_channel_->SignalDataReceived.disconnect(webrtc_data_channel);
+  data_channel_->SignalStreamClosedRemotely.disconnect(webrtc_data_channel);
 }
 
 void WebRtcSession::AddSctpDataStream(int sid) {
@@ -1409,8 +1394,6 @@
 }
 
 void WebRtcSession::RemoveSctpDataStream(int sid) {
-  mediastream_signaling_->RemoveSctpDataChannel(sid);
-
   if (!data_channel_) {
     LOG(LS_ERROR) << "RemoveDataChannelStreams called when data_channel_ is "
                   << "NULL.";
@@ -1424,41 +1407,6 @@
   return data_channel_ && data_channel_->ready_to_send_data();
 }
 
-rtc::scoped_refptr<DataChannel> WebRtcSession::CreateDataChannel(
-    const std::string& label,
-    const InternalDataChannelInit* config) {
-  if (state() == STATE_RECEIVEDTERMINATE) {
-    return NULL;
-  }
-  if (data_channel_type_ == cricket::DCT_NONE) {
-    LOG(LS_ERROR) << "CreateDataChannel: Data is not supported in this call.";
-    return NULL;
-  }
-  InternalDataChannelInit new_config =
-      config ? (*config) : InternalDataChannelInit();
-  if (data_channel_type_ == cricket::DCT_SCTP) {
-    if (new_config.id < 0) {
-      rtc::SSLRole role;
-      if (GetSslRole(&role) &&
-          !mediastream_signaling_->AllocateSctpSid(role, &new_config.id)) {
-        LOG(LS_ERROR) << "No id can be allocated for the SCTP data channel.";
-        return NULL;
-      }
-    } else if (!mediastream_signaling_->IsSctpSidAvailable(new_config.id)) {
-      LOG(LS_ERROR) << "Failed to create a SCTP data channel "
-                    << "because the id is already in use or out of range.";
-      return NULL;
-    }
-  }
-
-  rtc::scoped_refptr<DataChannel> channel(DataChannel::Create(
-      this, data_channel_type_, label, new_config));
-  if (channel && !mediastream_signaling_->AddDataChannel(channel))
-    return NULL;
-
-  return channel;
-}
-
 cricket::DataChannelType WebRtcSession::data_channel_type() const {
   return data_channel_type_;
 }
@@ -1727,7 +1675,6 @@
   const cricket::ContentInfo* video_info =
       cricket::GetFirstVideoContent(desc);
   if ((!video_info || video_info->rejected) && video_channel_) {
-    mediastream_signaling_->OnVideoChannelClose();
     SignalVideoChannelDestroyed();
     const std::string content_name = video_channel_->content_name();
     channel_manager_->DestroyVideoChannel(video_channel_.release());
@@ -1736,7 +1683,6 @@
   const cricket::ContentInfo* voice_info =
       cricket::GetFirstAudioContent(desc);
   if ((!voice_info || voice_info->rejected) && voice_channel_) {
-    mediastream_signaling_->OnAudioChannelClose();
     SignalVoiceChannelDestroyed();
     const std::string content_name = voice_channel_->content_name();
     channel_manager_->DestroyVoiceChannel(voice_channel_.release());
@@ -1745,7 +1691,6 @@
   const cricket::ContentInfo* data_info =
       cricket::GetFirstDataContent(desc);
   if ((!data_info || data_info->rejected) && data_channel_) {
-    mediastream_signaling_->OnDataChannelClose();
     SignalDataChannelDestroyed();
     const std::string content_name = data_channel_->content_name();
     channel_manager_->DestroyDataChannel(data_channel_.release());
@@ -1820,6 +1765,8 @@
 
   voice_channel_->SignalDtlsSetupFailure.connect(
       this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalVoiceChannelCreated();
   return true;
 }
 
@@ -1833,6 +1780,8 @@
 
   video_channel_->SignalDtlsSetupFailure.connect(
       this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalVideoChannelCreated();
   return true;
 }
 
@@ -1845,16 +1794,14 @@
   }
 
   if (sctp) {
-    mediastream_signaling_->OnDataTransportCreatedForSctp();
     data_channel_->SignalDataReceived.connect(
         this, &WebRtcSession::OnDataChannelMessageReceived);
-    data_channel_->SignalStreamClosedRemotely.connect(
-        mediastream_signaling_,
-        &MediaStreamSignaling::OnRemoteSctpDataChannelClosed);
   }
 
   data_channel_->SignalDtlsSetupFailure.connect(
       this, &WebRtcSession::OnDtlsSetupFailure);
+
+  SignalDataChannelCreated();
   return true;
 }
 
@@ -1880,13 +1827,22 @@
     cricket::DataChannel* channel,
     const cricket::ReceiveDataParams& params,
     const rtc::Buffer& payload) {
-  ASSERT(data_channel_type_ == cricket::DCT_SCTP);
-  if (params.type == cricket::DMT_CONTROL &&
-      mediastream_signaling_->IsSctpSidAvailable(params.ssrc)) {
-    // Received CONTROL on unused sid, process as an OPEN message.
-    mediastream_signaling_->AddDataChannelFromOpenMessage(params, payload);
+  RTC_DCHECK(data_channel_type_ == cricket::DCT_SCTP);
+  if (params.type == cricket::DMT_CONTROL && IsOpenMessage(payload)) {
+    // Received OPEN message; parse and signal that a new data channel should
+    // be created.
+    std::string label;
+    InternalDataChannelInit config;
+    config.id = params.ssrc;
+    if (!ParseDataChannelOpenMessage(payload, &label, &config)) {
+      LOG(LS_WARNING) << "Failed to parse the OPEN message for sid "
+                      << params.ssrc;
+      return;
+    }
+    config.open_handshake_role = InternalDataChannelInit::kAcker;
+    SignalDataChannelOpenMessage(label, config);
   }
-  // otherwise ignore the message.
+  // Otherwise ignore the message.
 }
 
 // Returns false if bundle is enabled and rtcp_mux is disabled.
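
Note: the following is a minimal sketch, not part of this CL, showing the consumer side of the new SignalDataChannelOpenMessage flow. The class and method names are invented for illustration, and the snippet assumes the WebRTC headers touched in this CL are available. The point is that WebRtcSession now only parses the OPEN message and signals it; the object that owns the session is responsible for actually creating the DataChannel, which is the job the removed WebRtcSession::CreateDataChannel used to do.

// Sketch only (illustrative names): an owner of a WebRtcSession that creates
// DataChannels itself, now that WebRtcSession::CreateDataChannel is gone.
class DataChannelOwnerSketch : public sigslot::has_slots<> {
 public:
  explicit DataChannelOwnerSketch(webrtc::WebRtcSession* session)
      : session_(session) {
    session_->SignalDataChannelOpenMessage.connect(
        this, &DataChannelOwnerSketch::OnDataChannelOpenMessage);
  }

 private:
  // Invoked after the session has parsed a valid OPEN message; |config|
  // already carries the remote sid and the kAcker handshake role.
  void OnDataChannelOpenMessage(const std::string& label,
                                const webrtc::InternalDataChannelInit& config) {
    rtc::scoped_refptr<webrtc::DataChannel> channel =
        webrtc::DataChannel::Create(session_, session_->data_channel_type(),
                                    label, config);
    if (!channel) {
      return;  // Creation failed; nothing to hand to the application.
    }
    // The owner would hand |channel| to the application from here.
  }

  webrtc::WebRtcSession* session_;  // Not owned.
};
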
diff --git a/talk/app/webrtc/webrtcsession.h b/talk/app/webrtc/webrtcsession.h
index b214eb6..8dcc85f 100644
--- a/talk/app/webrtc/webrtcsession.h
+++ b/talk/app/webrtc/webrtcsession.h
@@ -114,7 +114,6 @@
 
 class WebRtcSession : public cricket::BaseSession,
                       public AudioProviderInterface,
-                      public DataChannelFactory,
                       public VideoProviderInterface,
                       public DtmfProviderInterface,
                       public DataChannelProviderInterface {
@@ -122,8 +121,7 @@
   WebRtcSession(cricket::ChannelManager* channel_manager,
                 rtc::Thread* signaling_thread,
                 rtc::Thread* worker_thread,
-                cricket::PortAllocator* port_allocator,
-                MediaStreamSignaling* mediastream_signaling);
+                cricket::PortAllocator* port_allocator);
   virtual ~WebRtcSession();
 
   bool Initialize(
@@ -149,10 +147,6 @@
     return data_channel_.get();
   }
 
-  virtual const MediaStreamSignaling* mediastream_signaling() const {
-    return mediastream_signaling_;
-  }
-
   void SetSdesPolicy(cricket::SecurePolicy secure_policy);
   cricket::SecurePolicy SdesPolicy() const;
 
@@ -165,9 +159,11 @@
 
   void CreateOffer(
       CreateSessionDescriptionObserver* observer,
-      const PeerConnectionInterface::RTCOfferAnswerOptions& options);
+      const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+      const cricket::MediaSessionOptions& session_options);
   void CreateAnswer(CreateSessionDescriptionObserver* observer,
-                    const MediaConstraintsInterface* constraints);
+                    const MediaConstraintsInterface* constraints,
+                    const cricket::MediaSessionOptions& session_options);
   // The ownership of |desc| will be transferred after this call.
   bool SetLocalDescription(SessionDescriptionInterface* desc,
                            std::string* err_desc);
@@ -251,11 +247,6 @@
   virtual bool GetRemoteSSLCertificate(const std::string& transport_name,
                                        rtc::SSLCertificate** cert);
 
-  // Implements DataChannelFactory.
-  rtc::scoped_refptr<DataChannel> CreateDataChannel(
-      const std::string& label,
-      const InternalDataChannelInit* config) override;
-
   cricket::DataChannelType data_channel_type() const;
 
   bool IceRestartPending() const;
@@ -277,6 +268,20 @@
     metrics_observer_ = metrics_observer;
   }
 
+  // Called when voice_channel_, video_channel_ and data_channel_ are created
+  // or destroyed, e.g. as a result of setting a new description.
+  sigslot::signal0<> SignalVoiceChannelCreated;
+  sigslot::signal0<> SignalVoiceChannelDestroyed;
+  sigslot::signal0<> SignalVideoChannelCreated;
+  sigslot::signal0<> SignalVideoChannelDestroyed;
+  sigslot::signal0<> SignalDataChannelCreated;
+  sigslot::signal0<> SignalDataChannelDestroyed;
+
+  // Called when a valid data channel OPEN message is received.
+  // The std::string argument is the data channel label.
+  sigslot::signal2<const std::string&, const InternalDataChannelInit&>
+      SignalDataChannelOpenMessage;
+
  private:
   // Indicates the type of SessionDescription in a call to SetLocalDescription
   // and SetRemoteDescription.
@@ -386,7 +391,6 @@
   rtc::scoped_ptr<cricket::VideoChannel> video_channel_;
   rtc::scoped_ptr<cricket::DataChannel> data_channel_;
   cricket::ChannelManager* channel_manager_;
-  MediaStreamSignaling* mediastream_signaling_;
   IceObserver* ice_observer_;
   PeerConnectionInterface::IceConnectionState ice_connection_state_;
   bool ice_connection_receiving_;
@@ -410,10 +414,6 @@
   rtc::scoped_ptr<WebRtcSessionDescriptionFactory>
       webrtc_session_desc_factory_;
 
-  sigslot::signal0<> SignalVoiceChannelDestroyed;
-  sigslot::signal0<> SignalVideoChannelDestroyed;
-  sigslot::signal0<> SignalDataChannelDestroyed;
-
   // Member variables for caching global options.
   cricket::AudioOptions audio_options_;
   cricket::VideoOptions video_options_;
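
Note: a minimal sketch, not part of this CL, of how a sigslot listener can observe the new channel lifetime signals. The class name is invented for illustration; only the signals declared above are assumed.

// Sketch only: mirrors the data channel's lifetime by listening to the
// SignalDataChannelCreated/SignalDataChannelDestroyed signals added above.
class ChannelLifetimeObserverSketch : public sigslot::has_slots<> {
 public:
  explicit ChannelLifetimeObserverSketch(webrtc::WebRtcSession* session) {
    session->SignalDataChannelCreated.connect(
        this, &ChannelLifetimeObserverSketch::OnDataChannelCreated);
    session->SignalDataChannelDestroyed.connect(
        this, &ChannelLifetimeObserverSketch::OnDataChannelDestroyed);
  }

  bool has_data_channel() const { return has_data_channel_; }

 private:
  void OnDataChannelCreated() { has_data_channel_ = true; }
  void OnDataChannelDestroyed() { has_data_channel_ = false; }

  bool has_data_channel_ = false;
};
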
diff --git a/talk/app/webrtc/webrtcsession_unittest.cc b/talk/app/webrtc/webrtcsession_unittest.cc
index a463ba9..f0558ee 100644
--- a/talk/app/webrtc/webrtcsession_unittest.cc
+++ b/talk/app/webrtc/webrtcsession_unittest.cc
@@ -31,11 +31,12 @@
 #include "talk/app/webrtc/fakemetricsobserver.h"
 #include "talk/app/webrtc/jsepicecandidate.h"
 #include "talk/app/webrtc/jsepsessiondescription.h"
+#include "talk/app/webrtc/peerconnection.h"
 #include "talk/app/webrtc/mediastreamsignaling.h"
+#include "talk/app/webrtc/sctputils.h"
+#include "talk/app/webrtc/streamcollection.h"
 #include "talk/app/webrtc/streamcollection.h"
 #include "talk/app/webrtc/test/fakeconstraints.h"
 #include "talk/app/webrtc/test/fakedtlsidentitystore.h"
-#include "talk/app/webrtc/test/fakemediastreamsignaling.h"
 #include "talk/app/webrtc/videotrack.h"
 #include "talk/app/webrtc/webrtcsession.h"
 #include "talk/app/webrtc/webrtcsessiondescriptionfactory.h"
@@ -79,10 +81,12 @@
 using webrtc::CreateSessionDescription;
 using webrtc::CreateSessionDescriptionObserver;
 using webrtc::CreateSessionDescriptionRequest;
+using webrtc::DataChannel;
 using webrtc::DtlsIdentityStoreInterface;
 using webrtc::FakeConstraints;
 using webrtc::FakeMetricsObserver;
 using webrtc::IceCandidateCollection;
+using webrtc::InternalDataChannelInit;
 using webrtc::JsepIceCandidate;
 using webrtc::JsepSessionDescription;
 using webrtc::PeerConnectionFactoryInterface;
@@ -157,6 +161,14 @@
     "a=rtpmap:96 rtx/90000\r\n"
     "a=fmtp:96 apt=0\r\n";
 
+static const char kStream1[] = "stream1";
+static const char kVideoTrack1[] = "video1";
+static const char kAudioTrack1[] = "audio1";
+
+static const char kStream2[] = "stream2";
+static const char kVideoTrack2[] = "video2";
+static const char kAudioTrack2[] = "audio2";
+
 enum RTCCertificateGenerationMethod { ALREADY_GENERATED, DTLS_IDENTITY_STORE };
 
 // Add some extra |newlines| to the |message| after |line|.
@@ -237,10 +249,8 @@
                        rtc::Thread* signaling_thread,
                        rtc::Thread* worker_thread,
                        cricket::PortAllocator* port_allocator,
-                       webrtc::IceObserver* ice_observer,
-                       webrtc::MediaStreamSignaling* mediastream_signaling)
-    : WebRtcSession(cmgr, signaling_thread, worker_thread, port_allocator,
-                    mediastream_signaling) {
+                       webrtc::IceObserver* ice_observer)
+      : WebRtcSession(cmgr, signaling_thread, worker_thread, port_allocator) {
     RegisterIceObserver(ice_observer);
   }
   virtual ~WebRtcSessionForTest() {}
@@ -344,7 +354,8 @@
 };
 
 class WebRtcSessionTest
-    : public testing::TestWithParam<RTCCertificateGenerationMethod> {
+    : public testing::TestWithParam<RTCCertificateGenerationMethod>,
+      public sigslot::has_slots<> {
  protected:
   // TODO: Investigate why ChannelManager crashes if it's created
   // after stun_server.
@@ -366,7 +377,6 @@
       stun_server_(cricket::TestStunServer::Create(Thread::Current(),
                                                    stun_socket_addr_)),
       turn_server_(Thread::Current(), kTurnUdpIntAddr, kTurnUdpExtAddr),
-      mediastream_signaling_(channel_manager_.get()),
       metrics_observer_(new rtc::RefCountedObject<FakeMetricsObserver>()) {
     cricket::ServerAddresses stun_servers;
     stun_servers.insert(stun_socket_addr_);
@@ -395,10 +405,10 @@
       const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
     ASSERT_TRUE(session_.get() == NULL);
     session_.reset(new WebRtcSessionForTest(
-        channel_manager_.get(), rtc::Thread::Current(),
-        rtc::Thread::Current(), allocator_.get(),
-        &observer_,
-        &mediastream_signaling_));
+        channel_manager_.get(), rtc::Thread::Current(), rtc::Thread::Current(),
+        allocator_.get(), &observer_));
+    session_->SignalDataChannelOpenMessage.connect(
+        this, &WebRtcSessionTest::OnDataChannelOpenMessage);
 
     EXPECT_EQ(PeerConnectionInterface::kIceConnectionNew,
         observer_.ice_connection_state_);
@@ -411,6 +421,12 @@
     session_->set_metrics_observer(metrics_observer_);
   }
 
+  void OnDataChannelOpenMessage(const std::string& label,
+                                const InternalDataChannelInit& config) {
+    last_data_channel_label_ = label;
+    last_data_channel_config_ = config;
+  }
+
   void Init() {
     PeerConnectionInterface::RTCConfiguration configuration;
     Init(nullptr, configuration);
@@ -474,8 +490,97 @@
     Init();
   }
 
-  // Creates a local offer and applies it. Starts ice.
-  // Call mediastream_signaling_.UseOptionsWithStreamX() before this function
+  void SendAudioVideoStream1() {
+    send_stream_1_ = true;
+    send_stream_2_ = false;
+    send_audio_ = true;
+    send_video_ = true;
+  }
+
+  void SendAudioVideoStream2() {
+    send_stream_1_ = false;
+    send_stream_2_ = true;
+    send_audio_ = true;
+    send_video_ = true;
+  }
+
+  void SendAudioVideoStream1And2() {
+    send_stream_1_ = true;
+    send_stream_2_ = true;
+    send_audio_ = true;
+    send_video_ = true;
+  }
+
+  void SendNothing() {
+    send_stream_1_ = false;
+    send_stream_2_ = false;
+    send_audio_ = false;
+    send_video_ = false;
+  }
+
+  void SendAudioOnlyStream2() {
+    send_stream_1_ = false;
+    send_stream_2_ = true;
+    send_audio_ = true;
+    send_video_ = false;
+  }
+
+  void SendVideoOnlyStream2() {
+    send_stream_1_ = false;
+    send_stream_2_ = true;
+    send_audio_ = false;
+    send_video_ = true;
+  }
+
+  void AddStreamsToOptions(cricket::MediaSessionOptions* session_options) {
+    if (send_stream_1_ && send_audio_) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrack1,
+                                     kStream1);
+    }
+    if (send_stream_1_ && send_video_) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrack1,
+                                     kStream1);
+    }
+    if (send_stream_2_ && send_audio_) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, kAudioTrack2,
+                                     kStream2);
+    }
+    if (send_stream_2_ && send_video_) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, kVideoTrack2,
+                                     kStream2);
+    }
+    if (data_channel_ && session_->data_channel_type() == cricket::DCT_RTP) {
+      session_options->AddSendStream(cricket::MEDIA_TYPE_DATA,
+                                     data_channel_->label(),
+                                     data_channel_->label());
+    }
+  }
+
+  void GetOptionsForOffer(
+      const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
+      cricket::MediaSessionOptions* session_options) {
+    AddStreamsToOptions(session_options);
+    ASSERT_TRUE(ConvertRtcOptionsForOffer(rtc_options, session_options));
+
+    if (session_->data_channel_type() == cricket::DCT_SCTP && data_channel_) {
+      session_options->data_channel_type = cricket::DCT_SCTP;
+    }
+  }
+
+  void GetOptionsForAnswer(const webrtc::MediaConstraintsInterface* constraints,
+                           cricket::MediaSessionOptions* session_options) {
+    AddStreamsToOptions(session_options);
+    session_options->recv_audio = false;
+    session_options->recv_video = false;
+    ASSERT_TRUE(ParseConstraintsForAnswer(constraints, session_options));
+
+    if (session_->data_channel_type() == cricket::DCT_SCTP) {
+      session_options->data_channel_type = cricket::DCT_SCTP;
+    }
+  }
+
+  // Creates a local offer and applies it. Starts ICE.
+  // Call SendAudioVideoStreamX() before this function
   // to decide which streams to create.
   void InitiateCall() {
     SessionDescriptionInterface* offer = CreateOffer();
@@ -497,7 +602,9 @@
       const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
     rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest>
         observer = new WebRtcSessionCreateSDPObserverForTest();
-    session_->CreateOffer(observer, options);
+    cricket::MediaSessionOptions session_options;
+    GetOptionsForOffer(options, &session_options);
+    session_->CreateOffer(observer, options, session_options);
     EXPECT_TRUE_WAIT(
         observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit,
         2000);
@@ -508,7 +615,9 @@
       const webrtc::MediaConstraintsInterface* constraints) {
     rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest> observer
         = new WebRtcSessionCreateSDPObserverForTest();
-    session_->CreateAnswer(observer, constraints);
+    cricket::MediaSessionOptions session_options;
+    GetOptionsForAnswer(constraints, &session_options);
+    session_->CreateAnswer(observer, constraints, session_options);
     EXPECT_TRUE_WAIT(
         observer->state() != WebRtcSessionCreateSDPObserverForTest::kInit,
         2000);
@@ -638,7 +747,7 @@
         value_set);
     session_.reset();
     Init();
-    mediastream_signaling_.SendAudioVideoStream1();
+    SendAudioVideoStream1();
     SessionDescriptionInterface* offer = CreateOffer();
 
     SetLocalDescriptionWithoutError(offer);
@@ -735,7 +844,7 @@
 
   // Creates a remote offer and applies it as a remote description,
   // creates a local answer and applies it as a local description.
-  // Call mediastream_signaling_.UseOptionsWithStreamX() before this function
+  // Call SendAudioVideoStreamX() before this function
   // to decide which local and remote streams to create.
   void CreateAndSetRemoteOfferAndLocalAnswer() {
     SessionDescriptionInterface* offer = CreateRemoteOffer();
@@ -911,11 +1020,11 @@
     return offer;
   }
 
-  // Create a remote offer. Call mediastream_signaling_.UseOptionsWithStreamX()
+  // Create a remote offer. Call SendAudioVideoStreamX()
   // before this function to decide which streams to create.
   JsepSessionDescription* CreateRemoteOffer() {
     cricket::MediaSessionOptions options;
-    mediastream_signaling_.GetOptionsForAnswer(NULL, &options);
+    GetOptionsForAnswer(NULL, &options);
     return CreateRemoteOffer(options, session_->remote_description());
   }
 
@@ -943,21 +1052,20 @@
       return CreateRemoteAnswer(offer, options, cricket::SEC_REQUIRED);
   }
 
-  // Creates an answer session description with streams based on
-  // |mediastream_signaling_|. Call
-  // mediastream_signaling_.UseOptionsWithStreamX() before this function
+  // Creates an answer session description.
+  // Call SendAudioVideoStreamX() before this function
   // to decide which streams to create.
   JsepSessionDescription* CreateRemoteAnswer(
       const SessionDescriptionInterface* offer) {
     cricket::MediaSessionOptions options;
-    mediastream_signaling_.GetOptionsForAnswer(NULL, &options);
+    GetOptionsForAnswer(NULL, &options);
     return CreateRemoteAnswer(offer, options, cricket::SEC_REQUIRED);
   }
 
   void TestSessionCandidatesWithBundleRtcpMux(bool bundle, bool rtcp_mux) {
     AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
     Init();
-    mediastream_signaling_.SendAudioVideoStream1();
+    SendAudioVideoStream1();
 
     PeerConnectionInterface::RTCOfferAnswerOptions options;
     options.use_rtp_mux = bundle;
@@ -1005,7 +1113,7 @@
     } else {
       Init();
     }
-    mediastream_signaling_.SendAudioVideoStream1();
+    SendAudioVideoStream1();
     CreateAndSetRemoteOfferAndLocalAnswer();
     EXPECT_FALSE(session_->CanInsertDtmf(""));
     EXPECT_EQ(can, session_->CanInsertDtmf(kAudioTrack1));
@@ -1121,7 +1229,7 @@
   void TestLoopbackCall(const LoopbackNetworkConfiguration& config) {
     LoopbackNetworkManager loopback_network_manager(this, config);
     Init();
-    mediastream_signaling_.SendAudioVideoStream1();
+    SendAudioVideoStream1();
     SessionDescriptionInterface* offer = CreateOffer();
 
     EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew,
@@ -1218,10 +1326,15 @@
     return true;
   }
 
-  void SetLocalDescriptionWithDataChannel() {
+  void CreateDataChannel() {
     webrtc::InternalDataChannelInit dci;
-    dci.reliable = false;
-    session_->CreateDataChannel("datachannel", &dci);
+    dci.reliable = session_->data_channel_type() == cricket::DCT_SCTP;
+    data_channel_ = DataChannel::Create(
+        session_.get(), session_->data_channel_type(), "datachannel", dci);
+  }
+
+  void SetLocalDescriptionWithDataChannel() {
+    CreateDataChannel();
     SessionDescriptionInterface* offer = CreateOffer();
     SetLocalDescriptionWithoutError(offer);
   }
@@ -1252,15 +1365,16 @@
     }
 
     PeerConnectionInterface::RTCOfferAnswerOptions options;
+    cricket::MediaSessionOptions session_options;
     const int kNumber = 3;
     rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest>
         observers[kNumber];
     for (int i = 0; i < kNumber; ++i) {
       observers[i] = new WebRtcSessionCreateSDPObserverForTest();
       if (type == CreateSessionDescriptionRequest::kOffer) {
-        session_->CreateOffer(observers[i], options);
+        session_->CreateOffer(observers[i], options, session_options);
       } else {
-        session_->CreateAnswer(observers[i], NULL);
+        session_->CreateAnswer(observers[i], nullptr, session_options);
       }
     }
 
@@ -1305,12 +1419,20 @@
   rtc::scoped_ptr<cricket::BasicPortAllocator> allocator_;
   PeerConnectionFactoryInterface::Options options_;
   rtc::scoped_ptr<FakeConstraints> constraints_;
-  FakeMediaStreamSignaling mediastream_signaling_;
   rtc::scoped_ptr<WebRtcSessionForTest> session_;
   MockIceObserver observer_;
   cricket::FakeVideoMediaChannel* video_channel_;
   cricket::FakeVoiceMediaChannel* voice_channel_;
   rtc::scoped_refptr<FakeMetricsObserver> metrics_observer_;
+  // The following flags affect options created for CreateOffer/CreateAnswer.
+  bool send_stream_1_ = false;
+  bool send_stream_2_ = false;
+  bool send_audio_ = false;
+  bool send_video_ = false;
+  rtc::scoped_refptr<DataChannel> data_channel_;
+  // Last values received from the SignalDataChannelOpenMessage signal.
+  std::string last_data_channel_label_;
+  InternalDataChannelInit last_data_channel_config_;
 };
 
 TEST_P(WebRtcSessionTest, TestInitializeWithDtls) {
@@ -1343,7 +1465,7 @@
   AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
   AddInterface(rtc::SocketAddress(kClientAddrHost2, kClientAddrPort));
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   InitiateCall();
   EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
   EXPECT_EQ(8u, observer_.mline_0_candidates_.size());
@@ -1358,7 +1480,7 @@
                 rtc::FD_ANY,
                 rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   InitiateCall();
   // Since kClientAddrHost1 is blocked, not expecting stun candidates for it.
   EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
@@ -1370,7 +1492,7 @@
 TEST_F(WebRtcSessionTest, TestIceTransportsNone) {
   AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
   InitWithIceTransport(PeerConnectionInterface::kNone);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   InitiateCall();
   EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
   EXPECT_EQ(0u, observer_.mline_0_candidates_.size());
@@ -1383,7 +1505,7 @@
   AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
   ConfigureAllocatorWithTurn();
   InitWithIceTransport(PeerConnectionInterface::kRelay);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   InitiateCall();
   EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
   EXPECT_EQ(2u, observer_.mline_0_candidates_.size());
@@ -1402,7 +1524,7 @@
 TEST_F(WebRtcSessionTest, TestIceTransportsAll) {
   AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
   InitWithIceTransport(PeerConnectionInterface::kAll);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   InitiateCall();
   EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
   // Host + STUN. By default allocator is disabled to gather relay candidates.
@@ -1423,13 +1545,13 @@
 // media engine creates the expected send and receive streams.
 TEST_F(WebRtcSessionTest, TestCreateSdesOfferReceiveSdesAnswer) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
   const std::string session_id_orig = offer->session_id();
   const std::string session_version_orig = offer->session_version();
   SetLocalDescriptionWithoutError(offer);
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
@@ -1449,7 +1571,7 @@
   EXPECT_TRUE(kAudioTrack1 == voice_channel_->send_streams()[0].id);
 
   // Create new offer without send streams.
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   offer = CreateOffer();
 
   // Verify the session id is the same and the session version is
@@ -1462,7 +1584,7 @@
   EXPECT_EQ(0u, video_channel_->send_streams().size());
   EXPECT_EQ(0u, voice_channel_->send_streams().size());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   answer = CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
 
@@ -1477,12 +1599,12 @@
 // media engine creates the expected send and receive streams.
 TEST_F(WebRtcSessionTest, TestReceiveSdesOfferCreateSdesAnswer) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* offer = CreateOffer();
   VerifyCryptoParams(offer->description());
   SetRemoteDescriptionWithoutError(offer);
 
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* answer = CreateAnswer(NULL);
   VerifyCryptoParams(answer->description());
   SetLocalDescriptionWithoutError(answer);
@@ -1504,12 +1626,12 @@
   ASSERT_EQ(1u, voice_channel_->send_streams().size());
   EXPECT_TRUE(kAudioTrack1 == voice_channel_->send_streams()[0].id);
 
-  mediastream_signaling_.SendAudioVideoStream1And2();
+  SendAudioVideoStream1And2();
   offer = CreateOffer();
   SetRemoteDescriptionWithoutError(offer);
 
   // Answer by turning off all send streams.
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   answer = CreateAnswer(NULL);
 
   // Verify the session id is the same and the session version is
@@ -1623,7 +1745,7 @@
 // and that we return an answer with a DTLS fingerprint.
 TEST_P(WebRtcSessionTest, TestReceiveDtlsOfferCreateDtlsAnswer) {
   MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   InitWithDtls(GetParam());
   SetFactoryDtlsSrtp();
   cricket::MediaSessionOptions options;
@@ -1652,7 +1774,7 @@
 // and then we accept a remote answer with a DTLS fingerprint successfully.
 TEST_P(WebRtcSessionTest, TestCreateDtlsOfferReceiveDtlsAnswer) {
   MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   InitWithDtls(GetParam());
   SetFactoryDtlsSrtp();
 
@@ -1741,7 +1863,7 @@
 // Test that we create a local offer without SDES or DTLS and accept a remote
 // answer without SDES or DTLS when encryption is disabled.
 TEST_P(WebRtcSessionTest, TestCreateOfferReceiveAnswerWithoutEncryption) {
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   options_.disable_encryption = true;
   InitWithDtls(GetParam());
 
@@ -1797,7 +1919,7 @@
 
 TEST_F(WebRtcSessionTest, TestSetLocalOfferTwice) {
   Init();
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   // SetLocalDescription takes ownership of offer.
   SessionDescriptionInterface* offer = CreateOffer();
   SetLocalDescriptionWithoutError(offer);
@@ -1809,7 +1931,7 @@
 
 TEST_F(WebRtcSessionTest, TestSetRemoteOfferTwice) {
   Init();
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   // SetRemoteDescription takes ownership of offer.
   SessionDescriptionInterface* offer = CreateOffer();
   SetRemoteDescriptionWithoutError(offer);
@@ -1820,7 +1942,7 @@
 
 TEST_F(WebRtcSessionTest, TestSetLocalAndRemoteOffer) {
   Init();
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   SessionDescriptionInterface* offer = CreateOffer();
   SetLocalDescriptionWithoutError(offer);
   offer = CreateOffer();
@@ -1830,7 +1952,7 @@
 
 TEST_F(WebRtcSessionTest, TestSetRemoteAndLocalOffer) {
   Init();
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   SessionDescriptionInterface* offer = CreateOffer();
   SetRemoteDescriptionWithoutError(offer);
   offer = CreateOffer();
@@ -1840,7 +1962,7 @@
 
 TEST_F(WebRtcSessionTest, TestSetLocalPrAnswer) {
   Init();
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   SessionDescriptionInterface* offer = CreateRemoteOffer();
   SetRemoteDescriptionExpectState(offer, BaseSession::STATE_RECEIVEDINITIATE);
 
@@ -1849,21 +1971,21 @@
   pranswer->set_type(SessionDescriptionInterface::kPrAnswer);
   SetLocalDescriptionExpectState(pranswer, BaseSession::STATE_SENTPRACCEPT);
 
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   JsepSessionDescription* pranswer2 = static_cast<JsepSessionDescription*>(
       CreateAnswer(NULL));
   pranswer2->set_type(SessionDescriptionInterface::kPrAnswer);
 
   SetLocalDescriptionExpectState(pranswer2, BaseSession::STATE_SENTPRACCEPT);
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer = CreateAnswer(NULL);
   SetLocalDescriptionExpectState(answer, BaseSession::STATE_SENTACCEPT);
 }
 
 TEST_F(WebRtcSessionTest, TestSetRemotePrAnswer) {
   Init();
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   SessionDescriptionInterface* offer = CreateOffer();
   SetLocalDescriptionExpectState(offer, BaseSession::STATE_SENTINITIATE);
 
@@ -1874,7 +1996,7 @@
   SetRemoteDescriptionExpectState(pranswer,
                                   BaseSession::STATE_RECEIVEDPRACCEPT);
 
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   JsepSessionDescription* pranswer2 =
       CreateRemoteAnswer(session_->local_description());
   pranswer2->set_type(SessionDescriptionInterface::kPrAnswer);
@@ -1882,7 +2004,7 @@
   SetRemoteDescriptionExpectState(pranswer2,
                                   BaseSession::STATE_RECEIVEDPRACCEPT);
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionExpectState(answer, BaseSession::STATE_RECEIVEDACCEPT);
@@ -1890,7 +2012,7 @@
 
 TEST_F(WebRtcSessionTest, TestSetLocalAnswerWithoutOffer) {
   Init();
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   SessionDescriptionInterface* answer =
@@ -1901,7 +2023,7 @@
 
 TEST_F(WebRtcSessionTest, TestSetRemoteAnswerWithoutOffer) {
   Init();
-  mediastream_signaling_.SendNothing();
+  SendNothing();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   SessionDescriptionInterface* answer =
@@ -1912,7 +2034,7 @@
 
 TEST_F(WebRtcSessionTest, TestAddRemoteCandidate) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   cricket::Candidate candidate;
   candidate.set_component(1);
@@ -1965,7 +2087,7 @@
   candidate1.set_component(1);
   JsepIceCandidate ice_candidate1(kMediaContentName0, kMediaContentIndex0,
                                   candidate1);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
 
   EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate1));
@@ -2014,7 +2136,7 @@
 TEST_F(WebRtcSessionTest, TestLocalCandidatesAddedToSessionDescription) {
   AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
 
   const SessionDescriptionInterface* local_desc = session_->local_description();
@@ -2034,7 +2156,7 @@
   EXPECT_EQ(0u, candidates->count());
 
   // Update the session descriptions.
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
 
   local_desc = session_->local_description();
@@ -2054,7 +2176,7 @@
   candidate1.set_component(1);
   JsepIceCandidate ice_candidate(kMediaContentName0, kMediaContentIndex0,
                                  candidate1);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
 
   EXPECT_TRUE(offer->AddCandidate(&ice_candidate));
@@ -2078,7 +2200,7 @@
 TEST_F(WebRtcSessionTest, TestSetLocalAndRemoteDescriptionWithCandidates) {
   AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   // ICE is started but candidates are not provided until SetLocalDescription
   // is called.
   EXPECT_EQ(0u, observer_.mline_0_candidates_.size());
@@ -2105,7 +2227,7 @@
 // present in the SessionDescription.
 TEST_F(WebRtcSessionTest, TestChannelCreationsWithContentNames) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   // CreateOffer creates session description with the content names "audio" and
@@ -2170,7 +2292,7 @@
 TEST_F(WebRtcSessionTest, CreateOfferWithoutConstraints) {
   Init();
   // Test Audio only offer.
-  mediastream_signaling_.UseOptionsAudioOnly();
+  SendAudioOnlyStream2();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   const cricket::ContentInfo* content =
@@ -2180,7 +2302,7 @@
   EXPECT_TRUE(content == NULL);
 
   // Test Audio / Video offer.
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   offer.reset(CreateOffer());
   content = cricket::GetFirstAudioContent(offer->description());
   EXPECT_TRUE(content != NULL);
@@ -2319,7 +2441,7 @@
   rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
   SetRemoteDescriptionWithoutError(offer.release());
   // Test with a stream with tracks.
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> answer(
       CreateAnswer(NULL));
   const cricket::ContentInfo* content =
@@ -2369,7 +2491,7 @@
   constraints_no_receive.SetMandatoryReceiveVideo(false);
 
   // Test with a stream with tracks.
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> answer(
       CreateAnswer(&constraints_no_receive));
 
@@ -2426,7 +2548,7 @@
   EXPECT_TRUE(media_engine_->GetVideoChannel(0) == NULL);
   EXPECT_TRUE(media_engine_->GetVoiceChannel(0) == NULL);
 
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
 
   cricket::MediaSessionOptions options;
@@ -2447,7 +2569,7 @@
   EXPECT_EQ(kAudioTrack1, voice_channel_->send_streams()[0].id);
 
   // Let the remote end update the session descriptions, with Audio and Video.
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   CreateAndSetRemoteOfferAndLocalAnswer();
 
   video_channel_ = media_engine_->GetVideoChannel(0);
@@ -2466,7 +2588,7 @@
   EXPECT_EQ(kAudioTrack2, voice_channel_->send_streams()[0].id);
 
   // Change session back to audio only.
-  mediastream_signaling_.UseOptionsAudioOnly();
+  SendAudioOnlyStream2();
   CreateAndSetRemoteOfferAndLocalAnswer();
 
   EXPECT_EQ(0u, video_channel_->recv_streams().size());
@@ -2482,7 +2604,7 @@
   Init();
   EXPECT_TRUE(media_engine_->GetVideoChannel(0) == NULL);
   EXPECT_TRUE(media_engine_->GetVoiceChannel(0) == NULL);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
 
   cricket::MediaSessionOptions options;
@@ -2507,7 +2629,7 @@
   EXPECT_EQ(kVideoTrack1, video_channel_->send_streams()[0].id);
 
   // Update the session descriptions, with Audio and Video.
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   CreateAndSetRemoteOfferAndLocalAnswer();
 
   voice_channel_ = media_engine_->GetVoiceChannel(0);
@@ -2519,7 +2641,7 @@
   EXPECT_EQ(kAudioTrack2, voice_channel_->send_streams()[0].id);
 
   // Change session back to video only.
-  mediastream_signaling_.UseOptionsVideoOnly();
+  SendVideoOnlyStream2();
   CreateAndSetRemoteOfferAndLocalAnswer();
 
   video_channel_ = media_engine_->GetVideoChannel(0);
@@ -2533,7 +2655,7 @@
 
 TEST_F(WebRtcSessionTest, VerifyCryptoParamsInSDP) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
   VerifyCryptoParams(offer->description());
   SetRemoteDescriptionWithoutError(offer.release());
@@ -2544,7 +2666,7 @@
 TEST_F(WebRtcSessionTest, VerifyNoCryptoParamsInSDP) {
   options_.disable_encryption = true;
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
   VerifyNoCryptoParams(offer->description(), false);
 }
@@ -2563,7 +2685,7 @@
 // no a=ice-ufrag and a=ice-pwd lines are present in the SDP.
 TEST_F(WebRtcSessionTest, TestSetLocalDescriptionWithoutIce) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   std::string sdp;
@@ -2589,7 +2711,7 @@
 // too short ice ufrag and pwd strings.
 TEST_F(WebRtcSessionTest, TestSetLocalDescriptionInvalidIceCredentials) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   std::string sdp;
@@ -2695,7 +2817,7 @@
   AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
 
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -2778,7 +2900,7 @@
 // kBundlePolicyBalanced BUNDLE policy and answer contains BUNDLE.
 TEST_F(WebRtcSessionTest, TestBalancedBundleInAnswer) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -2789,7 +2911,7 @@
   EXPECT_NE(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
@@ -2801,7 +2923,7 @@
 // kBundlePolicyBalanced BUNDLE policy but no BUNDLE in the answer.
 TEST_F(WebRtcSessionTest, TestBalancedNoBundleInAnswer) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -2812,7 +2934,7 @@
   EXPECT_NE(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
 
   // Remove BUNDLE from the answer.
   rtc::scoped_ptr<SessionDescriptionInterface> answer(
@@ -2831,7 +2953,7 @@
 // kBundlePolicyMaxBundle policy with BUNDLE in the answer.
 TEST_F(WebRtcSessionTest, TestMaxBundleBundleInAnswer) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -2842,7 +2964,7 @@
   EXPECT_EQ(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
@@ -2855,7 +2977,7 @@
 // audio content in the answer.
 TEST_F(WebRtcSessionTest, TestMaxBundleRejectAudio) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -2866,7 +2988,7 @@
   EXPECT_EQ(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   cricket::MediaSessionOptions recv_options;
   recv_options.recv_audio = false;
   recv_options.recv_video = true;
@@ -2887,7 +3009,7 @@
 // kBundlePolicyMaxBundle policy but no BUNDLE in the answer.
 TEST_F(WebRtcSessionTest, TestMaxBundleNoBundleInAnswer) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -2898,7 +3020,7 @@
   EXPECT_EQ(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
 
   // Remove BUNDLE from the answer.
   rtc::scoped_ptr<SessionDescriptionInterface> answer(
@@ -2917,7 +3039,7 @@
 // kBundlePolicyMaxBundle policy with BUNDLE in the remote offer.
 TEST_F(WebRtcSessionTest, TestMaxBundleBundleInRemoteOffer) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   SessionDescriptionInterface* offer = CreateRemoteOffer();
   SetRemoteDescriptionWithoutError(offer);
@@ -2925,7 +3047,7 @@
   EXPECT_EQ(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer = CreateAnswer(nullptr);
   SetLocalDescriptionWithoutError(answer);
 
@@ -2936,7 +3058,7 @@
 // kBundlePolicyMaxBundle policy but no BUNDLE in the remote offer.
 TEST_F(WebRtcSessionTest, TestMaxBundleNoBundleInRemoteOffer) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   // Remove BUNDLE from the offer.
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
@@ -2954,7 +3076,7 @@
 // kBundlePolicyMaxCompat bundle policy and answer contains BUNDLE.
 TEST_F(WebRtcSessionTest, TestMaxCompatBundleInAnswer) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxCompat);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -2965,7 +3087,7 @@
   EXPECT_NE(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
@@ -2979,7 +3101,7 @@
 // kBundlePolicyMaxCompat BUNDLE policy but no BUNDLE in the answer.
 TEST_F(WebRtcSessionTest, TestMaxCompatNoBundleInAnswer) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxCompat);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
 
@@ -2989,7 +3111,7 @@
   EXPECT_NE(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
 
   // Remove BUNDLE from the answer.
   rtc::scoped_ptr<SessionDescriptionInterface> answer(
@@ -3008,7 +3130,7 @@
 // kBundlePolicyMaxbundle and then we call SetRemoteDescription first.
 TEST_F(WebRtcSessionTest, TestMaxBundleWithSetRemoteDescriptionFirst) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyMaxBundle);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -3022,7 +3144,7 @@
 
 TEST_F(WebRtcSessionTest, TestRequireRtcpMux) {
   InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyRequire);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   SessionDescriptionInterface* offer = CreateOffer(options);
@@ -3031,7 +3153,7 @@
   EXPECT_TRUE(session_->voice_rtcp_transport_channel() == NULL);
   EXPECT_TRUE(session_->video_rtcp_transport_channel() == NULL);
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
@@ -3042,7 +3164,7 @@
 
 TEST_F(WebRtcSessionTest, TestNegotiateRtcpMux) {
   InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyNegotiate);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   SessionDescriptionInterface* offer = CreateOffer(options);
@@ -3051,7 +3173,7 @@
   EXPECT_TRUE(session_->voice_rtcp_transport_channel() != NULL);
   EXPECT_TRUE(session_->video_rtcp_transport_channel() != NULL);
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
@@ -3064,7 +3186,7 @@
 // if BUNDLE is enabled but rtcp-mux is disabled in m-lines.
 TEST_F(WebRtcSessionTest, TestDisabledRtcpMuxWithBundleEnabled) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -3092,7 +3214,7 @@
 
 TEST_F(WebRtcSessionTest, SetAudioPlayout) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
   cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
   ASSERT_TRUE(channel != NULL);
@@ -3111,7 +3233,7 @@
 
 TEST_F(WebRtcSessionTest, SetAudioSend) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
   cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
   ASSERT_TRUE(channel != NULL);
@@ -3139,7 +3261,7 @@
 
 TEST_F(WebRtcSessionTest, AudioRendererForLocalStream) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
   cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
   ASSERT_TRUE(channel != NULL);
@@ -3162,7 +3284,7 @@
 
 TEST_F(WebRtcSessionTest, SetVideoPlayout) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
   cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
   ASSERT_TRUE(channel != NULL);
@@ -3179,7 +3301,7 @@
 
 TEST_F(WebRtcSessionTest, SetVideoSend) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
   cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
   ASSERT_TRUE(channel != NULL);
@@ -3204,7 +3326,7 @@
 TEST_F(WebRtcSessionTest, InsertDtmf) {
   // Setup
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   CreateAndSetRemoteOfferAndLocalAnswer();
   FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
   EXPECT_EQ(0U, channel->dtmf_info_queue().size());
@@ -3255,7 +3377,7 @@
 // Verifying local offer and remote answer have matching m-lines as per RFC 3264.
 TEST_F(WebRtcSessionTest, TestIncorrectMLinesInRemoteAnswer) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
   SetLocalDescriptionWithoutError(offer);
   rtc::scoped_ptr<SessionDescriptionInterface> answer(
@@ -3303,7 +3425,7 @@
 // RFC 3264.
 TEST_F(WebRtcSessionTest, TestIncorrectMLinesInLocalAnswer) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateRemoteOffer();
   SetRemoteDescriptionWithoutError(offer);
   SessionDescriptionInterface* answer = CreateAnswer(NULL);
@@ -3324,7 +3446,7 @@
 // before SetLocalDescription is called.
 TEST_F(WebRtcSessionTest, TestIceStartAfterSetLocalDescriptionOnly) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateRemoteOffer();
   cricket::Candidate candidate;
   candidate.set_component(1);
@@ -3354,7 +3476,7 @@
 // description as per security policy set in MediaSessionDescriptionFactory.
 TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescription) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   // Making sure SetLocalDescription correctly sets crypto value in
@@ -3373,7 +3495,7 @@
 TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescriptionWithDisabled) {
   options_.disable_encryption = true;
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   // Making sure SetLocalDescription correctly sets crypto value in
@@ -3398,7 +3520,7 @@
       CreateRemoteOffer(options));
   SetRemoteDescriptionWithoutError(offer.release());
 
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> answer(
       CreateAnswer(NULL));
   SetLocalDescriptionWithoutError(answer.release());
@@ -3429,7 +3551,7 @@
       CreateRemoteOffer(options));
   SetRemoteDescriptionWithoutError(offer.release());
 
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> answer(
       CreateAnswer(NULL));
   SetLocalDescriptionWithoutError(answer.release());
@@ -3452,7 +3574,7 @@
 
 TEST_F(WebRtcSessionTest, TestSessionContentError) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
   const std::string session_id_orig = offer->session_id();
   const std::string session_version_orig = offer->session_version();
@@ -3461,7 +3583,7 @@
   video_channel_ = media_engine_->GetVideoChannel(0);
   video_channel_->set_fail_set_send_codecs(true);
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionAnswerExpectError("ERROR_CONTENT", answer);
@@ -3629,11 +3751,8 @@
 
   // TEST PLAN: Set the port number to something new, set it in the SDP,
   // and pass it all the way down.
-  webrtc::InternalDataChannelInit dci;
-  dci.reliable = true;
   EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
-  rtc::scoped_refptr<webrtc::DataChannel> dc =
-      session_->CreateDataChannel("datachannel", &dci);
+  CreateDataChannel();
 
   cricket::FakeDataMediaChannel* ch = data_engine_->GetChannel(0);
   int portnum = -1;
@@ -3655,7 +3774,36 @@
   EXPECT_EQ(new_recv_port, portnum);
 }
 
-TEST_F(WebRtcSessionTest, TestUsesProvidedCertificate) {
+// Verifies that when the session's SCTP data channel receives an OPEN
+// message, WebRtcSession signals a DataChannel creation request with the
+// expected label and config.
+TEST_P(WebRtcSessionTest, TestSctpDataChannelOpenMessage) {
+  MAYBE_SKIP_TEST(rtc::SSLStreamAdapter::HaveDtlsSrtp);
+
+  InitWithDtls(GetParam());
+
+  SetLocalDescriptionWithDataChannel();
+  EXPECT_EQ(cricket::DCT_SCTP, data_engine_->last_channel_type());
+
+  webrtc::DataChannelInit config;
+  config.id = 1;
+  rtc::Buffer payload;
+  webrtc::WriteDataChannelOpenMessage("a", config, &payload);
+  cricket::ReceiveDataParams params;
+  params.ssrc = config.id;
+  params.type = cricket::DMT_CONTROL;
+
+  cricket::DataChannel* data_channel = session_->data_channel();
+  data_channel->SignalDataReceived(data_channel, params, payload);
+
+  EXPECT_EQ("a", last_data_channel_label_);
+  EXPECT_EQ(config.id, last_data_channel_config_.id);
+  EXPECT_FALSE(last_data_channel_config_.negotiated);
+  EXPECT_EQ(webrtc::InternalDataChannelInit::kAcker,
+            last_data_channel_config_.open_handshake_role);
+}
+
+TEST_P(WebRtcSessionTest, TestUsesProvidedCertificate) {
   rtc::scoped_refptr<rtc::RTCCertificate> certificate =
       FakeDtlsIdentityStore::GenerateCertificate();
 
@@ -3675,7 +3823,7 @@
   InitWithDtls(GetParam());
 
   EXPECT_TRUE(session_->waiting_for_certificate_for_testing());
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
 
   EXPECT_TRUE(offer != NULL);
@@ -3792,7 +3940,7 @@
   constraints_->AddOptional(
       webrtc::MediaConstraintsInterface::kEnableDscp, true);
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
 
   SetLocalDescriptionWithoutError(offer);
@@ -3816,7 +3964,7 @@
       webrtc::MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate,
       true);
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
 
   SetLocalDescriptionWithoutError(offer);
@@ -3844,7 +3992,7 @@
       webrtc::MediaConstraintsInterface::kCombinedAudioVideoBwe,
       true);
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   SessionDescriptionInterface* offer = CreateOffer();
 
   SetLocalDescriptionWithoutError(offer);
@@ -3864,7 +4012,7 @@
   InitWithDtls(GetParam());
   SetFactoryDtlsSrtp();
 
-  mediastream_signaling_.UseOptionsAudioOnly();
+  SendAudioOnlyStream2();
   SessionDescriptionInterface* offer = CreateOffer();
   SetLocalDescriptionWithoutError(offer);
 
@@ -3894,7 +4042,7 @@
   InitWithDtls(GetParam());
   SetFactoryDtlsSrtp();
 
-  mediastream_signaling_.UseOptionsAudioOnly();
+  SendAudioOnlyStream2();
   SessionDescriptionInterface* offer = CreateOffer();
   SetLocalDescriptionWithoutError(offer);
 
@@ -3920,7 +4068,7 @@
 // by local side.
 TEST_F(WebRtcSessionTest, TestRtxRemovedByCreateAnswer) {
   Init();
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
   std::string offer_sdp(kSdpWithRtx);
 
   SessionDescriptionInterface* offer =
@@ -3943,7 +4091,7 @@
 // and voice channels.
 TEST_F(WebRtcSessionTest, TestSetSocketOptionBeforeBundle) {
   InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
-  mediastream_signaling_.SendAudioVideoStream1();
+  SendAudioVideoStream1();
 
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.use_rtp_mux = true;
@@ -3973,7 +4121,7 @@
   EXPECT_NE(session_->voice_rtp_transport_channel(),
             session_->video_rtp_transport_channel());
 
-  mediastream_signaling_.SendAudioVideoStream2();
+  SendAudioVideoStream2();
   SessionDescriptionInterface* answer =
       CreateRemoteAnswer(session_->local_description());
   SetRemoteDescriptionWithoutError(answer);
@@ -3997,10 +4145,12 @@
   PeerConnectionInterface::RTCOfferAnswerOptions options;
   options.offer_to_receive_audio =
       RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+  cricket::MediaSessionOptions session_options;
+  session_options.recv_audio = true;
 
   for (auto& o : observers) {
     o = new WebRtcSessionCreateSDPObserverForTest();
-    session_->CreateOffer(o, options);
+    session_->CreateOffer(o, options, session_options);
   }
 
   session_.reset();
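
The key behavioral change visible in these test hunks: the session's CreateOffer() no longer derives media options internally, so each test now supplies a cricket::MediaSessionOptions alongside the RTCOfferAnswerOptions. A minimal sketch of the new call pattern, mirroring the hunk above (field choices are illustrative, not exhaustive):

  // Caller builds both option structs and keeps them consistent.
  PeerConnectionInterface::RTCOfferAnswerOptions options;
  options.offer_to_receive_audio =
      RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;

  cricket::MediaSessionOptions session_options;
  session_options.recv_audio = true;  // keep in sync with |options| above

  session_->CreateOffer(observer, options, session_options);
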
diff --git a/talk/app/webrtc/webrtcsessiondescriptionfactory.cc b/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
index 8769315..d391020 100644
--- a/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
+++ b/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
@@ -31,7 +31,6 @@
 #include "talk/app/webrtc/jsep.h"
 #include "talk/app/webrtc/jsepsessiondescription.h"
 #include "talk/app/webrtc/mediaconstraintsinterface.h"
-#include "talk/app/webrtc/mediastreamsignaling.h"
 #include "talk/app/webrtc/webrtcsession.h"
 #include "webrtc/base/sslidentity.h"
 
@@ -131,16 +130,13 @@
 WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
     rtc::Thread* signaling_thread,
     cricket::ChannelManager* channel_manager,
-    MediaStreamSignaling* mediastream_signaling,
     rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
     const rtc::scoped_refptr<WebRtcIdentityRequestObserver>&
         identity_request_observer,
     WebRtcSession* session,
     const std::string& session_id,
-    cricket::DataChannelType dct,
     bool dtls_enabled)
     : signaling_thread_(signaling_thread),
-      mediastream_signaling_(mediastream_signaling),
       session_desc_factory_(channel_manager, &transport_desc_factory_),
       // RFC 4566 suggested a Network Time Protocol (NTP) format timestamp
       // as the session id and session version. To simplify, it should be fine
@@ -151,7 +147,6 @@
       identity_request_observer_(identity_request_observer),
       session_(session),
       session_id_(session_id),
-      data_channel_type_(dct),
       certificate_request_state_(CERTIFICATE_NOT_NEEDED) {
   session_desc_factory_.set_add_legacy_streams(false);
   // SRTP-SDES is disabled if DTLS is on.
@@ -161,18 +156,14 @@
 WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
     rtc::Thread* signaling_thread,
     cricket::ChannelManager* channel_manager,
-    MediaStreamSignaling* mediastream_signaling,
     WebRtcSession* session,
-    const std::string& session_id,
-    cricket::DataChannelType dct)
+    const std::string& session_id)
     : WebRtcSessionDescriptionFactory(signaling_thread,
                                       channel_manager,
-                                      mediastream_signaling,
                                       nullptr,
                                       nullptr,
                                       session,
                                       session_id,
-                                      dct,
                                       false) {
   LOG(LS_VERBOSE) << "DTLS-SRTP disabled.";
 }
@@ -180,21 +171,17 @@
 WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
     rtc::Thread* signaling_thread,
     cricket::ChannelManager* channel_manager,
-    MediaStreamSignaling* mediastream_signaling,
     rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
     WebRtcSession* session,
-    const std::string& session_id,
-    cricket::DataChannelType dct)
+    const std::string& session_id)
     : WebRtcSessionDescriptionFactory(
-        signaling_thread,
-        channel_manager,
-        mediastream_signaling,
-        dtls_identity_store.Pass(),
-        new rtc::RefCountedObject<WebRtcIdentityRequestObserver>(),
-        session,
-        session_id,
-        dct,
-        true) {
+          signaling_thread,
+          channel_manager,
+          dtls_identity_store.Pass(),
+          new rtc::RefCountedObject<WebRtcIdentityRequestObserver>(),
+          session,
+          session_id,
+          true) {
   RTC_DCHECK(dtls_identity_store_);
 
   certificate_request_state_ = CERTIFICATE_WAITING;
@@ -216,14 +203,16 @@
 WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
     rtc::Thread* signaling_thread,
     cricket::ChannelManager* channel_manager,
-    MediaStreamSignaling* mediastream_signaling,
     const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
     WebRtcSession* session,
-    const std::string& session_id,
-    cricket::DataChannelType dct)
-    : WebRtcSessionDescriptionFactory(
-        signaling_thread, channel_manager, mediastream_signaling, nullptr,
-        nullptr, session, session_id, dct, true) {
+    const std::string& session_id)
+    : WebRtcSessionDescriptionFactory(signaling_thread,
+                                      channel_manager,
+                                      nullptr,
+                                      nullptr,
+                                      session,
+                                      session_id,
+                                      true) {
   RTC_DCHECK(certificate);
 
   certificate_request_state_ = CERTIFICATE_WAITING;
@@ -264,9 +253,8 @@
 
 void WebRtcSessionDescriptionFactory::CreateOffer(
     CreateSessionDescriptionObserver* observer,
-    const PeerConnectionInterface::RTCOfferAnswerOptions& options) {
-  cricket::MediaSessionOptions session_options;
-
+    const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+    const cricket::MediaSessionOptions& session_options) {
   std::string error = "CreateOffer";
   if (certificate_request_state_ == CERTIFICATE_FAILED) {
     error += kFailedDueToIdentityFailed;
@@ -275,14 +263,6 @@
     return;
   }
 
-  if (!mediastream_signaling_->GetOptionsForOffer(options,
-                                                  &session_options)) {
-    error += " called with invalid options.";
-    LOG(LS_ERROR) << error;
-    PostCreateSessionDescriptionFailed(observer, error);
-    return;
-  }
-
   if (!ValidStreams(session_options.streams)) {
     error += " called with invalid media streams.";
     LOG(LS_ERROR) << error;
@@ -290,11 +270,6 @@
     return;
   }
 
-  if (data_channel_type_ == cricket::DCT_SCTP &&
-      mediastream_signaling_->HasDataChannels()) {
-    session_options.data_channel_type = cricket::DCT_SCTP;
-  }
-
   CreateSessionDescriptionRequest request(
       CreateSessionDescriptionRequest::kOffer, observer, session_options);
   if (certificate_request_state_ == CERTIFICATE_WAITING) {
@@ -308,7 +283,8 @@
 
 void WebRtcSessionDescriptionFactory::CreateAnswer(
     CreateSessionDescriptionObserver* observer,
-    const MediaConstraintsInterface* constraints) {
+    const MediaConstraintsInterface* constraints,
+    const cricket::MediaSessionOptions& session_options) {
   std::string error = "CreateAnswer";
   if (certificate_request_state_ == CERTIFICATE_FAILED) {
     error += kFailedDueToIdentityFailed;
@@ -330,28 +306,15 @@
     return;
   }
 
-  cricket::MediaSessionOptions options;
-  if (!mediastream_signaling_->GetOptionsForAnswer(constraints, &options)) {
-    error += " called with invalid constraints.";
-    LOG(LS_ERROR) << error;
-    PostCreateSessionDescriptionFailed(observer, error);
-    return;
-  }
-  if (!ValidStreams(options.streams)) {
+  if (!ValidStreams(session_options.streams)) {
     error += " called with invalid media streams.";
     LOG(LS_ERROR) << error;
     PostCreateSessionDescriptionFailed(observer, error);
     return;
   }
-  // RTP data channel is handled in MediaSessionOptions::AddStream. SCTP streams
-  // are not signaled in the SDP so does not go through that path and must be
-  // handled here.
-  if (data_channel_type_ == cricket::DCT_SCTP) {
-    options.data_channel_type = cricket::DCT_SCTP;
-  }
 
   CreateSessionDescriptionRequest request(
-      CreateSessionDescriptionRequest::kAnswer, observer, options);
+      CreateSessionDescriptionRequest::kAnswer, observer, session_options);
   if (certificate_request_state_ == CERTIFICATE_WAITING) {
     create_session_description_requests_.push(request);
   } else {
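
With MediaStreamSignaling gone, the factory no longer calls GetOptionsForOffer()/GetOptionsForAnswer() or patches in the SCTP data channel type; the caller is expected to hand in a fully populated cricket::MediaSessionOptions. A hedged sketch of what the calling side might now do (the helper names and members marked "assumed" are illustrative, not part of this CL):

  // Illustrative caller-side flow before invoking the factory.
  cricket::MediaSessionOptions session_options;
  if (!GetOptionsForOffer(rtc_options, &session_options)) {  // assumed helper
    observer->OnFailure("CreateOffer called with invalid options.");
    return;
  }
  // The SCTP special case the factory used to apply now belongs to the caller.
  if (data_channel_type_ == cricket::DCT_SCTP && HasDataChannels()) {  // assumed members
    session_options.data_channel_type = cricket::DCT_SCTP;
  }
  session_->CreateOffer(observer, rtc_options, session_options);
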
diff --git a/talk/app/webrtc/webrtcsessiondescriptionfactory.h b/talk/app/webrtc/webrtcsessiondescriptionfactory.h
index 95fab63..91adc66 100644
--- a/talk/app/webrtc/webrtcsessiondescriptionfactory.h
+++ b/talk/app/webrtc/webrtcsessiondescriptionfactory.h
@@ -43,7 +43,6 @@
 namespace webrtc {
 class CreateSessionDescriptionObserver;
 class MediaConstraintsInterface;
-class MediaStreamSignaling;
 class SessionDescriptionInterface;
 class WebRtcSession;
 
@@ -92,32 +91,26 @@
   // Construct with DTLS disabled.
   WebRtcSessionDescriptionFactory(rtc::Thread* signaling_thread,
                                   cricket::ChannelManager* channel_manager,
-                                  MediaStreamSignaling* mediastream_signaling,
                                   WebRtcSession* session,
-                                  const std::string& session_id,
-                                  cricket::DataChannelType dct);
+                                  const std::string& session_id);
 
   // Construct with DTLS enabled using the specified |dtls_identity_store| to
   // generate a certificate.
   WebRtcSessionDescriptionFactory(
       rtc::Thread* signaling_thread,
       cricket::ChannelManager* channel_manager,
-      MediaStreamSignaling* mediastream_signaling,
       rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
       WebRtcSession* session,
-      const std::string& session_id,
-      cricket::DataChannelType dct);
+      const std::string& session_id);
 
   // Construct with DTLS enabled using the specified (already generated)
   // |certificate|.
   WebRtcSessionDescriptionFactory(
       rtc::Thread* signaling_thread,
       cricket::ChannelManager* channel_manager,
-      MediaStreamSignaling* mediastream_signaling,
       const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
       WebRtcSession* session,
-      const std::string& session_id,
-      cricket::DataChannelType dct);
+      const std::string& session_id);
   virtual ~WebRtcSessionDescriptionFactory();
 
   static void CopyCandidatesFromSessionDescription(
@@ -126,10 +119,11 @@
 
   void CreateOffer(
       CreateSessionDescriptionObserver* observer,
-      const PeerConnectionInterface::RTCOfferAnswerOptions& options);
-  void CreateAnswer(
-      CreateSessionDescriptionObserver* observer,
-      const MediaConstraintsInterface* constraints);
+      const PeerConnectionInterface::RTCOfferAnswerOptions& options,
+      const cricket::MediaSessionOptions& session_options);
+  void CreateAnswer(CreateSessionDescriptionObserver* observer,
+                    const MediaConstraintsInterface* constraints,
+                    const cricket::MediaSessionOptions& session_options);
 
   void SetSdesPolicy(cricket::SecurePolicy secure_policy);
   cricket::SecurePolicy SdesPolicy() const;
@@ -153,13 +147,11 @@
   WebRtcSessionDescriptionFactory(
       rtc::Thread* signaling_thread,
       cricket::ChannelManager* channel_manager,
-      MediaStreamSignaling* mediastream_signaling,
       rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
       const rtc::scoped_refptr<WebRtcIdentityRequestObserver>&
           identity_request_observer,
       WebRtcSession* session,
       const std::string& session_id,
-      cricket::DataChannelType dct,
       bool dtls_enabled);
 
   // MessageHandler implementation.
@@ -183,7 +175,6 @@
   std::queue<CreateSessionDescriptionRequest>
       create_session_description_requests_;
   rtc::Thread* const signaling_thread_;
-  MediaStreamSignaling* const mediastream_signaling_;
   cricket::TransportDescriptionFactory transport_desc_factory_;
   cricket::MediaSessionDescriptionFactory session_desc_factory_;
   uint64_t session_version_;
@@ -193,7 +184,6 @@
   // TODO(jiayl): remove the dependency on session once bug 2264 is fixed.
   WebRtcSession* const session_;
   const std::string session_id_;
-  const cricket::DataChannelType data_channel_type_;
   CertificateRequestState certificate_request_state_;
 
   RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSessionDescriptionFactory);
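
Net effect on construction: the MediaStreamSignaling pointer and the cricket::DataChannelType argument disappear from every constructor overload. A hedged example of the DTLS-disabled variant as it might now be instantiated from WebRtcSession (argument expressions are illustrative):

  // Before: ..., mediastream_signaling, session, session_id, dct
  // After:
  webrtc_session_desc_factory_.reset(new WebRtcSessionDescriptionFactory(
      signaling_thread(), channel_manager_, this, id()));
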
diff --git a/talk/libjingle_tests.gyp b/talk/libjingle_tests.gyp
index 366267c..2e42047 100755
--- a/talk/libjingle_tests.gyp
+++ b/talk/libjingle_tests.gyp
@@ -200,7 +200,6 @@
         'app/webrtc/jsepsessiondescription_unittest.cc',
         'app/webrtc/localaudiosource_unittest.cc',
         'app/webrtc/mediastream_unittest.cc',
-        'app/webrtc/mediastreamsignaling_unittest.cc',
         'app/webrtc/peerconnection_unittest.cc',
         'app/webrtc/peerconnectionendtoend_unittest.cc',
         'app/webrtc/peerconnectionfactory_unittest.cc',