Merge from Chromium at DEPS revision 03655fd3f6d7

This commit was generated by merge_to_master.py.

Change-Id: Ie97de41dee6631b70dd07c00db5bf3ad4dfe8e14
diff --git a/app/webrtc/objc/RTCEAGLVideoView+Internal.h b/app/webrtc/objc/RTCEAGLVideoView+Internal.h
deleted file mode 100644
index 10df2e3..0000000
--- a/app/webrtc/objc/RTCEAGLVideoView+Internal.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * libjingle
- * Copyright 2014, Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- *  1. Redistributions of source code must retain the above copyright notice,
- *     this list of conditions and the following disclaimer.
- *  2. Redistributions in binary form must reproduce the above copyright notice,
- *     this list of conditions and the following disclaimer in the documentation
- *     and/or other materials provided with the distribution.
- *  3. The name of the author may not be used to endorse or promote products
- *     derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#import <Foundation/Foundation.h>
-
-#import "RTCEAGLVideoView.h"
-#import "RTCVideoRenderer.h"
-
-// TODO(tkchin): Move declaration to implementation file. Exposed here in order
-// to support deprecated methods in RTCVideoRenderer.
-@interface RTCEAGLVideoView (Internal) <RTCVideoRendererDelegate>
-@end
diff --git a/app/webrtc/objc/RTCEAGLVideoView.m b/app/webrtc/objc/RTCEAGLVideoView.m
index faacef6..c55c87e 100644
--- a/app/webrtc/objc/RTCEAGLVideoView.m
+++ b/app/webrtc/objc/RTCEAGLVideoView.m
@@ -29,13 +29,12 @@
 #error "This file requires ARC support."
 #endif
 
-#import "RTCEAGLVideoView+Internal.h"
+#import "RTCEAGLVideoView.h"
 
 #import <GLKit/GLKit.h>
 
+#import "RTCI420Frame.h"
 #import "RTCOpenGLVideoRenderer.h"
-#import "RTCVideoRenderer.h"
-#import "RTCVideoTrack.h"
 
 // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
 // refreshes, which should be 30fps. We wrap the display link in order to avoid
@@ -105,7 +104,6 @@
   RTCDisplayLinkTimer* _timer;
   GLKView* _glkView;
   RTCOpenGLVideoRenderer* _glRenderer;
-  RTCVideoRenderer* _videoRenderer;
 }
 
 - (instancetype)initWithFrame:(CGRect)frame {
@@ -152,7 +150,6 @@
       // GLKViewDelegate method implemented below.
       [strongSelf.glkView setNeedsDisplay];
     }];
-    _videoRenderer = [[RTCVideoRenderer alloc] initWithDelegate:self];
     [self setupGL];
   }
   return self;
@@ -168,18 +165,6 @@
   [_timer invalidate];
 }
 
-- (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
-  if (_videoTrack == videoTrack) {
-    return;
-  }
-  [_videoTrack removeRenderer:_videoRenderer];
-  self.i420Frame = nil;
-  _videoTrack = videoTrack;
-  [_videoTrack addRenderer:_videoRenderer];
-  // TODO(tkchin): potentially handle changes in track state - e.g. render
-  // black if track fails.
-}
-
 #pragma mark - UIView
 
 - (void)layoutSubviews {
@@ -197,14 +182,31 @@
   [_glRenderer drawFrame:self.i420Frame];
 }
 
+#pragma mark - RTCVideoRenderer
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+  __weak RTCEAGLVideoView* weakSelf = self;
+  dispatch_async(dispatch_get_main_queue(), ^{
+    RTCEAGLVideoView* strongSelf = weakSelf;
+    [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
+  });
+}
+
+- (void)renderFrame:(RTCI420Frame*)frame {
+  self.i420Frame = frame;
+}
+
 #pragma mark - Private
 
 - (void)setupGL {
+  self.i420Frame = nil;
   [_glRenderer setupGL];
   _timer.isPaused = NO;
 }
 
 - (void)teardownGL {
+  self.i420Frame = nil;
   _timer.isPaused = YES;
   [_glkView deleteDrawable];
   [_glRenderer teardownGL];
@@ -219,25 +221,3 @@
 }
 
 @end
-
-@implementation RTCEAGLVideoView (Internal)
-
-#pragma mark - RTCVideoRendererDelegate
-
-// These methods are called when the video track has frame information to
-// provide. This occurs on non-main thread.
-- (void)renderer:(RTCVideoRenderer*)renderer
-      didSetSize:(CGSize)size {
-  __weak RTCEAGLVideoView* weakSelf = self;
-  dispatch_async(dispatch_get_main_queue(), ^{
-    RTCEAGLVideoView* strongSelf = weakSelf;
-    [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
-  });
-}
-
-- (void)renderer:(RTCVideoRenderer*)renderer
-    didReceiveFrame:(RTCI420Frame*)frame {
-  self.i420Frame = frame;
-}
-
-@end
diff --git a/app/webrtc/objc/RTCI420Frame.mm b/app/webrtc/objc/RTCI420Frame.mm
index 0b50691..9c394e5 100644
--- a/app/webrtc/objc/RTCI420Frame.mm
+++ b/app/webrtc/objc/RTCI420Frame.mm
@@ -78,6 +78,10 @@
   return _videoFrame->GetVPitch();
 }
 
+- (BOOL)makeExclusive {
+  return _videoFrame->MakeExclusive();
+}
+
 @end
 
 @implementation RTCI420Frame (Internal)
diff --git a/app/webrtc/objc/RTCMediaStream.mm b/app/webrtc/objc/RTCMediaStream.mm
index 27d20b8..a72508a 100644
--- a/app/webrtc/objc/RTCMediaStream.mm
+++ b/app/webrtc/objc/RTCMediaStream.mm
@@ -71,7 +71,7 @@
 }
 
 - (BOOL)addVideoTrack:(RTCVideoTrack*)track {
-  if (self.mediaStream->AddTrack(track.videoTrack)) {
+  if (self.mediaStream->AddTrack(track.nativeVideoTrack)) {
     [_videoTracks addObject:track];
     return YES;
   }
@@ -93,7 +93,8 @@
   NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:track];
   NSAssert(index != NSNotFound,
            @"|removeAudioTrack| called on unexpected RTCVideoTrack");
-  if (index != NSNotFound && self.mediaStream->RemoveTrack(track.videoTrack)) {
+  if (index != NSNotFound &&
+      self.mediaStream->RemoveTrack(track.nativeVideoTrack)) {
     [_videoTracks removeObjectAtIndex:index];
     return YES;
   }
diff --git a/app/webrtc/objc/RTCNSGLVideoView.m b/app/webrtc/objc/RTCNSGLVideoView.m
index 292e792..7aa4a11 100644
--- a/app/webrtc/objc/RTCNSGLVideoView.m
+++ b/app/webrtc/objc/RTCNSGLVideoView.m
@@ -33,10 +33,10 @@
 
 #import <CoreVideo/CVDisplayLink.h>
 #import <OpenGL/gl3.h>
+#import "RTCI420Frame.h"
 #import "RTCOpenGLVideoRenderer.h"
-#import "RTCVideoRenderer.h"
 
-@interface RTCNSGLVideoView () <RTCVideoRendererDelegate>
+@interface RTCNSGLVideoView ()
 // |i420Frame| is set when we receive a frame from a worker thread and is read
 // from the display link callback so atomicity is required.
 @property(atomic, strong) RTCI420Frame* i420Frame;
@@ -57,15 +57,6 @@
 
 @implementation RTCNSGLVideoView {
   CVDisplayLinkRef _displayLink;
-  RTCVideoRenderer* _videoRenderer;
-}
-
-- (instancetype)initWithFrame:(NSRect)frame
-                  pixelFormat:(NSOpenGLPixelFormat*)format {
-  if (self = [super initWithFrame:frame pixelFormat:format]) {
-    _videoRenderer = [[RTCVideoRenderer alloc] initWithDelegate:self];
-  }
-  return self;
 }
 
 - (void)dealloc {
@@ -109,37 +100,16 @@
   [super clearGLContext];
 }
 
-- (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
-  if (_videoTrack == videoTrack) {
-    return;
-  }
-  if (_videoTrack) {
-    [_videoTrack removeRenderer:_videoRenderer];
-    CVDisplayLinkStop(_displayLink);
-    // Clear contents.
-    self.i420Frame = nil;
-    [self drawFrame];
-  }
-  _videoTrack = videoTrack;
-  if (_videoTrack) {
-    [_videoTrack addRenderer:_videoRenderer];
-    CVDisplayLinkStart(_displayLink);
-  }
-}
+#pragma mark - RTCVideoRenderer
 
-#pragma mark - RTCVideoRendererDelegate
-
-// These methods are called when the video track has frame information to
-// provide. This occurs on non-main thread.
-- (void)renderer:(RTCVideoRenderer*)renderer
-      didSetSize:(CGSize)size {
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
   dispatch_async(dispatch_get_main_queue(), ^{
     [self.delegate videoView:self didChangeVideoSize:size];
   });
 }
 
-- (void)renderer:(RTCVideoRenderer*)renderer
-    didReceiveFrame:(RTCI420Frame*)frame {
+- (void)renderFrame:(RTCI420Frame*)frame {
   self.i420Frame = frame;
 }
 
@@ -174,9 +144,7 @@
   CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
   CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
       _displayLink, cglContext, cglPixelFormat);
-  if (_videoTrack) {
-    CVDisplayLinkStart(_displayLink);
-  }
+  CVDisplayLinkStart(_displayLink);
 }
 
 - (void)teardownDisplayLink {
diff --git a/app/webrtc/objc/RTCPeerConnection.mm b/app/webrtc/objc/RTCPeerConnection.mm
index 925de73..7767f76 100644
--- a/app/webrtc/objc/RTCPeerConnection.mm
+++ b/app/webrtc/objc/RTCPeerConnection.mm
@@ -68,6 +68,7 @@
     [_delegate peerConnection:_peerConnection
         didCreateSessionDescription:session
                               error:nil];
+    delete desc;
   }
 
   virtual void OnFailure(const std::string& error) OVERRIDE {
diff --git a/app/webrtc/objc/RTCVideoRenderer+Internal.h b/app/webrtc/objc/RTCVideoRendererAdapter.h
similarity index 84%
rename from app/webrtc/objc/RTCVideoRenderer+Internal.h
rename to app/webrtc/objc/RTCVideoRendererAdapter.h
index 22e445c..faf0906 100644
--- a/app/webrtc/objc/RTCVideoRenderer+Internal.h
+++ b/app/webrtc/objc/RTCVideoRendererAdapter.h
@@ -29,8 +29,12 @@
 
 #include "talk/app/webrtc/mediastreaminterface.h"
 
-@interface RTCVideoRenderer (Internal)
+@interface RTCVideoRendererAdapter : NSObject
 
-@property(nonatomic, readonly) webrtc::VideoRendererInterface* videoRenderer;
+@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
+@property(nonatomic, readonly)
+    webrtc::VideoRendererInterface* nativeVideoRenderer;
+
+- (instancetype)initWithVideoRenderer:(id<RTCVideoRenderer>)videoRenderer;
 
 @end
diff --git a/app/webrtc/objc/RTCVideoRenderer.mm b/app/webrtc/objc/RTCVideoRendererAdapter.mm
similarity index 70%
rename from app/webrtc/objc/RTCVideoRenderer.mm
rename to app/webrtc/objc/RTCVideoRendererAdapter.mm
index 4cfe43a..e29faad 100644
--- a/app/webrtc/objc/RTCVideoRenderer.mm
+++ b/app/webrtc/objc/RTCVideoRendererAdapter.mm
@@ -29,50 +29,46 @@
 #error "This file requires ARC support."
 #endif
 
-#import "RTCVideoRenderer+Internal.h"
+#import "RTCVideoRendererAdapter.h"
 #import "RTCI420Frame+Internal.h"
 
 namespace webrtc {
 
-class RTCVideoRendererAdapter : public VideoRendererInterface {
+class RTCVideoRendererNativeAdapter : public VideoRendererInterface {
  public:
-  RTCVideoRendererAdapter(RTCVideoRenderer* renderer) { _renderer = renderer; }
+  RTCVideoRendererNativeAdapter(RTCVideoRendererAdapter* adapter) {
+    _adapter = adapter;
+  }
 
   virtual void SetSize(int width, int height) OVERRIDE {
-    [_renderer.delegate renderer:_renderer
-                      didSetSize:CGSizeMake(width, height)];
+    [_adapter.videoRenderer setSize:CGSizeMake(width, height)];
   }
 
   virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
-    if (!_renderer.delegate) {
-      return;
-    }
     RTCI420Frame* i420Frame = [[RTCI420Frame alloc] initWithVideoFrame:frame];
-    [_renderer.delegate renderer:_renderer didReceiveFrame:i420Frame];
+    [_adapter.videoRenderer renderFrame:i420Frame];
   }
 
  private:
-  __weak RTCVideoRenderer* _renderer;
+  __weak RTCVideoRendererAdapter* _adapter;
 };
 }
 
-@implementation RTCVideoRenderer {
-  rtc::scoped_ptr<webrtc::RTCVideoRendererAdapter> _adapter;
+@implementation RTCVideoRendererAdapter {
+  id<RTCVideoRenderer> _videoRenderer;
+  rtc::scoped_ptr<webrtc::RTCVideoRendererNativeAdapter> _adapter;
 }
 
-- (instancetype)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate {
+- (instancetype)initWithVideoRenderer:(id<RTCVideoRenderer>)videoRenderer {
+  NSParameterAssert(videoRenderer);
   if (self = [super init]) {
-    _delegate = delegate;
-    _adapter.reset(new webrtc::RTCVideoRendererAdapter(self));
+    _videoRenderer = videoRenderer;
+    _adapter.reset(new webrtc::RTCVideoRendererNativeAdapter(self));
   }
   return self;
 }
 
-@end
-
-@implementation RTCVideoRenderer (Internal)
-
-- (webrtc::VideoRendererInterface*)videoRenderer {
+- (webrtc::VideoRendererInterface*)nativeVideoRenderer {
   return _adapter.get();
 }
 
diff --git a/app/webrtc/objc/RTCVideoTrack+Internal.h b/app/webrtc/objc/RTCVideoTrack+Internal.h
index 03c8f95..5f267ac 100644
--- a/app/webrtc/objc/RTCVideoTrack+Internal.h
+++ b/app/webrtc/objc/RTCVideoTrack+Internal.h
@@ -34,7 +34,7 @@
 
 @interface RTCVideoTrack (Internal)
 
-@property(nonatomic, assign, readonly)
-    rtc::scoped_refptr<webrtc::VideoTrackInterface> videoTrack;
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
 
 @end
diff --git a/app/webrtc/objc/RTCVideoTrack.mm b/app/webrtc/objc/RTCVideoTrack.mm
index beebde0..959bc6d 100644
--- a/app/webrtc/objc/RTCVideoTrack.mm
+++ b/app/webrtc/objc/RTCVideoTrack.mm
@@ -32,46 +32,55 @@
 #import "RTCVideoTrack+Internal.h"
 
 #import "RTCMediaStreamTrack+Internal.h"
-#import "RTCVideoRenderer+Internal.h"
+#import "RTCVideoRendererAdapter.h"
 
 @implementation RTCVideoTrack {
-  NSMutableArray* _rendererArray;
+  NSMutableArray* _adapters;
 }
 
 - (id)initWithMediaTrack:
           (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)
       mediaTrack {
   if (self = [super initWithMediaTrack:mediaTrack]) {
-    _rendererArray = [NSMutableArray array];
+    _adapters = [NSMutableArray array];
   }
   return self;
 }
 
-- (void)addRenderer:(RTCVideoRenderer*)renderer {
-  NSAssert1(![self.renderers containsObject:renderer],
-            @"renderers already contains object [%@]",
-            [renderer description]);
-  [_rendererArray addObject:renderer];
-  self.videoTrack->AddRenderer(renderer.videoRenderer);
-}
-
-- (void)removeRenderer:(RTCVideoRenderer*)renderer {
-  NSUInteger index = [self.renderers indexOfObjectIdenticalTo:renderer];
-  if (index != NSNotFound) {
-    [_rendererArray removeObjectAtIndex:index];
-    self.videoTrack->RemoveRenderer(renderer.videoRenderer);
+- (void)addRenderer:(id<RTCVideoRenderer>)renderer {
+  // Make sure we don't have this renderer yet.
+  for (RTCVideoRendererAdapter* adapter in _adapters) {
+    NSParameterAssert(adapter.videoRenderer != renderer);
   }
+  // Create a wrapper that provides a native pointer for us.
+  RTCVideoRendererAdapter* adapter =
+      [[RTCVideoRendererAdapter alloc] initWithVideoRenderer:renderer];
+  [_adapters addObject:adapter];
+  self.nativeVideoTrack->AddRenderer(adapter.nativeVideoRenderer);
 }
 
-- (NSArray*)renderers {
-  return [_rendererArray copy];
+- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
+  RTCVideoRendererAdapter* adapter = nil;
+  NSUInteger indexToRemove = NSNotFound;
+  for (NSUInteger i = 0; i < _adapters.count; i++) {
+    adapter = _adapters[i];
+    if (adapter.videoRenderer == renderer) {
+      indexToRemove = i;
+      break;
+    }
+  }
+  if (indexToRemove == NSNotFound) {
+    return;
+  }
+  self.nativeVideoTrack->RemoveRenderer(adapter.nativeVideoRenderer);
+  [_adapters removeObjectAtIndex:indexToRemove];
 }
 
 @end
 
 @implementation RTCVideoTrack (Internal)
 
-- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)videoTrack {
+- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
   return static_cast<webrtc::VideoTrackInterface*>(self.mediaTrack.get());
 }
 
diff --git a/app/webrtc/objc/public/RTCEAGLVideoView.h b/app/webrtc/objc/public/RTCEAGLVideoView.h
index c38799e..526175f 100644
--- a/app/webrtc/objc/public/RTCEAGLVideoView.h
+++ b/app/webrtc/objc/public/RTCEAGLVideoView.h
@@ -37,11 +37,10 @@
 
 @end
 
-@class RTCVideoTrack;
-// RTCEAGLVideoView renders |videoTrack| onto itself using OpenGLES.
-@interface RTCEAGLVideoView : UIView
+// RTCEAGLVideoView is an RTCVideoRenderer which renders i420 frames in its
+// bounds using OpenGLES 2.0.
+@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
 
-@property(nonatomic, strong) RTCVideoTrack* videoTrack;
 @property(nonatomic, weak) id<RTCEAGLVideoViewDelegate> delegate;
 
 @end
diff --git a/app/webrtc/objc/public/RTCI420Frame.h b/app/webrtc/objc/public/RTCI420Frame.h
index 737968c..7a8c4d4 100644
--- a/app/webrtc/objc/public/RTCI420Frame.h
+++ b/app/webrtc/objc/public/RTCI420Frame.h
@@ -43,6 +43,8 @@
 @property(nonatomic, readonly) NSInteger uPitch;
 @property(nonatomic, readonly) NSInteger vPitch;
 
+- (BOOL)makeExclusive;
+
 #ifndef DOXYGEN_SHOULD_SKIP_THIS
 // Disallow init and don't add to documentation
 - (id)init __attribute__((
diff --git a/app/webrtc/objc/public/RTCNSGLVideoView.h b/app/webrtc/objc/public/RTCNSGLVideoView.h
index fd757cb..0af2dc5 100644
--- a/app/webrtc/objc/public/RTCNSGLVideoView.h
+++ b/app/webrtc/objc/public/RTCNSGLVideoView.h
@@ -31,7 +31,7 @@
 
 #import <AppKit/NSOpenGLView.h>
 
-#import "RTCVideoTrack.h"
+#import "RTCVideoRenderer.h"
 
 @class RTCNSGLVideoView;
 @protocol RTCNSGLVideoViewDelegate
@@ -40,9 +40,8 @@
 
 @end
 
-@interface RTCNSGLVideoView : NSOpenGLView
+@interface RTCNSGLVideoView : NSOpenGLView <RTCVideoRenderer>
 
-@property(nonatomic, strong) RTCVideoTrack* videoTrack;
 @property(nonatomic, weak) id<RTCNSGLVideoViewDelegate> delegate;
 
 @end
diff --git a/app/webrtc/objc/public/RTCVideoRenderer.h b/app/webrtc/objc/public/RTCVideoRenderer.h
index 37977ce..3c2baba 100644
--- a/app/webrtc/objc/public/RTCVideoRenderer.h
+++ b/app/webrtc/objc/public/RTCVideoRenderer.h
@@ -31,34 +31,13 @@
 #endif
 
 @class RTCI420Frame;
-@class RTCVideoRenderer;
 
-// RTCVideoRendererDelegate is a protocol for an object that must be
-// implemented to get messages when rendering.
-@protocol RTCVideoRendererDelegate<NSObject>
+@protocol RTCVideoRenderer<NSObject>
 
 // The size of the frame.
-- (void)renderer:(RTCVideoRenderer*)renderer didSetSize:(CGSize)size;
+- (void)setSize:(CGSize)size;
 
 // The frame to be displayed.
-- (void)renderer:(RTCVideoRenderer*)renderer
-    didReceiveFrame:(RTCI420Frame*)frame;
-
-@end
-
-// Interface for rendering VideoFrames from a VideoTrack
-@interface RTCVideoRenderer : NSObject
-
-@property(nonatomic, weak) id<RTCVideoRendererDelegate> delegate;
-
-// Initialize the renderer.  Requires a delegate which does the actual drawing
-// of frames.
-- (instancetype)initWithDelegate:(id<RTCVideoRendererDelegate>)delegate;
-
-#ifndef DOXYGEN_SHOULD_SKIP_THIS
-// Disallow init and don't add to documentation
-- (id)init __attribute__((
-    unavailable("init is not a supported initializer for this class.")));
-#endif /* DOXYGEN_SHOULD_SKIP_THIS */
+- (void)renderFrame:(RTCI420Frame*)frame;
 
 @end
diff --git a/app/webrtc/objc/public/RTCVideoTrack.h b/app/webrtc/objc/public/RTCVideoTrack.h
index 291c923..8385b71 100644
--- a/app/webrtc/objc/public/RTCVideoTrack.h
+++ b/app/webrtc/objc/public/RTCVideoTrack.h
@@ -27,19 +27,16 @@
 
 #import "RTCMediaStreamTrack.h"
 
-@class RTCVideoRenderer;
+@protocol RTCVideoRenderer;
 
 // RTCVideoTrack is an ObjectiveC wrapper for VideoTrackInterface.
 @interface RTCVideoTrack : RTCMediaStreamTrack
 
-// The currently registered renderers.
-@property(nonatomic, strong, readonly) NSArray *renderers;
-
 // Register a renderer that will render all frames received on this track.
-- (void)addRenderer:(RTCVideoRenderer *)renderer;
+- (void)addRenderer:(id<RTCVideoRenderer>)renderer;
 
 // Deregister a renderer.
-- (void)removeRenderer:(RTCVideoRenderer *)renderer;
+- (void)removeRenderer:(id<RTCVideoRenderer>)renderer;
 
 #ifndef DOXYGEN_SHOULD_SKIP_THIS
 // Disallow init and don't add to documentation
diff --git a/app/webrtc/objctests/RTCPeerConnectionTest.mm b/app/webrtc/objctests/RTCPeerConnectionTest.mm
index 6c5950b..92d3c49 100644
--- a/app/webrtc/objctests/RTCPeerConnectionTest.mm
+++ b/app/webrtc/objctests/RTCPeerConnectionTest.mm
@@ -46,6 +46,16 @@
 #error "This file requires ARC support."
 #endif
 
+@interface RTCFakeRenderer : NSObject <RTCVideoRenderer>
+@end
+
+@implementation RTCFakeRenderer
+
+- (void)setSize:(CGSize)size {}
+- (void)renderFrame:(RTCI420Frame*)frame {}
+
+@end
+
 @interface RTCPeerConnectionTest : NSObject
 
 // Returns whether the two sessions are of the same type.
@@ -80,8 +90,7 @@
   RTCMediaStream* localMediaStream = [factory mediaStreamWithLabel:streamLabel];
   RTCVideoTrack* videoTrack =
       [factory videoTrackWithID:videoTrackID source:videoSource];
-  RTCVideoRenderer* videoRenderer =
-      [[RTCVideoRenderer alloc] initWithDelegate:nil];
+  RTCFakeRenderer* videoRenderer = [[RTCFakeRenderer alloc] init];
   [videoTrack addRenderer:videoRenderer];
   [localMediaStream addVideoTrack:videoTrack];
   // Test that removal/re-add works.
diff --git a/app/webrtc/peerconnection.cc b/app/webrtc/peerconnection.cc
index b64caf7..64ddcad 100644
--- a/app/webrtc/peerconnection.cc
+++ b/app/webrtc/peerconnection.cc
@@ -420,11 +420,6 @@
   return true;
 }
 
-bool PeerConnection::AddStream(MediaStreamInterface* local_stream,
-                               const MediaConstraintsInterface* constraints) {
-  return AddStream(local_stream);
-}
-
 void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
   mediastream_signaling_->RemoveLocalStream(local_stream);
   if (IsClosed()) {
diff --git a/app/webrtc/peerconnection.h b/app/webrtc/peerconnection.h
index 355211c..68aa154 100644
--- a/app/webrtc/peerconnection.h
+++ b/app/webrtc/peerconnection.h
@@ -66,8 +66,6 @@
   virtual rtc::scoped_refptr<StreamCollectionInterface> local_streams();
   virtual rtc::scoped_refptr<StreamCollectionInterface> remote_streams();
   virtual bool AddStream(MediaStreamInterface* local_stream);
-  virtual bool AddStream(MediaStreamInterface* local_stream,
-                         const MediaConstraintsInterface* constraints);
   virtual void RemoveStream(MediaStreamInterface* local_stream);
 
   virtual rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
diff --git a/app/webrtc/peerconnectioninterface.h b/app/webrtc/peerconnectioninterface.h
index 68b7879..73a4812 100644
--- a/app/webrtc/peerconnectioninterface.h
+++ b/app/webrtc/peerconnectioninterface.h
@@ -255,15 +255,7 @@
   // Add a new MediaStream to be sent on this PeerConnection.
   // Note that a SessionDescription negotiation is needed before the
   // remote peer can receive the stream.
-  // TODO(perkj): Make pure virtual once Chrome mocks have implemented.
-  virtual bool AddStream(MediaStreamInterface* stream) { return false;}
-
-  // Deprecated:
-  // TODO(perkj): Remove once its not used by Chrome.
-  virtual bool AddStream(MediaStreamInterface* stream,
-                         const MediaConstraintsInterface* constraints) {
-    return false;
-  }
+  virtual bool AddStream(MediaStreamInterface* stream) = 0;
 
   // Remove a MediaStream from this PeerConnection.
   // Note that a SessionDescription negotiation is need before the
@@ -351,10 +343,6 @@
     kIceState,
   };
 
-  // Deprecated.
-  // TODO(perkj): Remove once its not used by Chrome.
-  virtual void OnError() {}
-
   // Triggered when the SignalingState changed.
   virtual void OnSignalingChange(
      PeerConnectionInterface::SignalingState new_state) {}
diff --git a/app/webrtc/peerconnectionproxy.h b/app/webrtc/peerconnectionproxy.h
index 571c676..852d852 100644
--- a/app/webrtc/peerconnectionproxy.h
+++ b/app/webrtc/peerconnectionproxy.h
@@ -40,8 +40,6 @@
   PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>,
                 remote_streams)
   PROXY_METHOD1(bool, AddStream, MediaStreamInterface*)
-  PROXY_METHOD2(bool, AddStream, MediaStreamInterface*,
-                const MediaConstraintsInterface*);
   PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
   PROXY_METHOD1(rtc::scoped_refptr<DtmfSenderInterface>,
                 CreateDtmfSender, AudioTrackInterface*)
diff --git a/app/webrtc/webrtcsdp.cc b/app/webrtc/webrtcsdp.cc
index 23b8f3d..c7b34c4 100644
--- a/app/webrtc/webrtcsdp.cc
+++ b/app/webrtc/webrtcsdp.cc
@@ -206,7 +206,9 @@
 // draft-ietf-mmusic-trickle-ice-01
 // When no candidates have been gathered, set the connection
 // address to IP6 ::.
-static const char kDummyAddress[] = "::";
+// TODO(perkj): FF can not parse IP6 ::. See http://crbug/430333
+// Use IPV4 per default.
+static const char kDummyAddress[] = "0.0.0.0";
 static const char kDummyPort[] = "9";
 // RFC 3556
 static const char kApplicationSpecificMaximum[] = "AS";
@@ -675,7 +677,7 @@
     const std::vector<Candidate>& candidates,
     int component_id, std::string* port,
     std::string* ip, std::string* addr_type) {
-  *addr_type = kConnectionIpv6Addrtype;
+  *addr_type = kConnectionIpv4Addrtype;
   *port = kDummyPort;
   *ip = kDummyAddress;
   int current_preference = kPreferenceUnknown;
diff --git a/app/webrtc/webrtcsdp_unittest.cc b/app/webrtc/webrtcsdp_unittest.cc
index ea590db..8655487 100644
--- a/app/webrtc/webrtcsdp_unittest.cc
+++ b/app/webrtc/webrtcsdp_unittest.cc
@@ -213,8 +213,8 @@
     "t=0 0\r\n"
     "a=msid-semantic: WMS local_stream_1 local_stream_2\r\n"
     "m=audio 9 RTP/SAVPF 111 103 104\r\n"
-    "c=IN IP6 ::\r\n"
-    "a=rtcp:9 IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
     "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
     "a=mid:audio_content_name\r\n"
     "a=sendrecv\r\n"
@@ -234,8 +234,8 @@
     "a=ssrc:4 mslabel:local_stream_2\r\n"
     "a=ssrc:4 label:audio_track_id_2\r\n"
     "m=video 9 RTP/SAVPF 120\r\n"
-    "c=IN IP6 ::\r\n"
-    "a=rtcp:9 IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
     "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
     "a=mid:video_content_name\r\n"
     "a=sendrecv\r\n"
@@ -262,8 +262,8 @@
 
 static const char kSdpRtpDataChannelString[] =
     "m=application 9 RTP/SAVPF 101\r\n"
-    "c=IN IP6 ::\r\n"
-    "a=rtcp:9 IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
     "a=ice-ufrag:ufrag_data\r\n"
     "a=ice-pwd:pwd_data\r\n"
     "a=mid:data_content_name\r\n"
@@ -278,7 +278,7 @@
 
 static const char kSdpSctpDataChannelString[] =
     "m=application 9 DTLS/SCTP 5000\r\n"
-    "c=IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
     "a=ice-ufrag:ufrag_data\r\n"
     "a=ice-pwd:pwd_data\r\n"
     "a=mid:data_content_name\r\n"
@@ -289,7 +289,7 @@
     "m=application 9 DTLS/SCTP webrtc-datachannel\r\n"
     "a=fmtp:webrtc-datachannel max-message-size=100000\r\n"
     "a=sctp-port 5000\r\n"
-    "c=IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
     "a=ice-ufrag:ufrag_data\r\n"
     "a=ice-pwd:pwd_data\r\n"
     "a=mid:data_content_name\r\n";
@@ -316,10 +316,10 @@
     "t=0 0\r\n"
     "a=msid-semantic: WMS\r\n"
     "m=audio 9 RTP/SAVPF 111 103 104\r\n"
-    "c=IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
     "a=x-google-flag:conference\r\n"
     "m=video 9 RTP/SAVPF 120\r\n"
-    "c=IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
     "a=x-google-flag:conference\r\n";
 
 static const char kSdpSessionString[] =
@@ -331,8 +331,8 @@
 
 static const char kSdpAudioString[] =
     "m=audio 9 RTP/SAVPF 111\r\n"
-    "c=IN IP6 ::\r\n"
-    "a=rtcp:9 IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
     "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n"
     "a=mid:audio_content_name\r\n"
     "a=sendrecv\r\n"
@@ -344,8 +344,8 @@
 
 static const char kSdpVideoString[] =
     "m=video 9 RTP/SAVPF 120\r\n"
-    "c=IN IP6 ::\r\n"
-    "a=rtcp:9 IN IP6 ::\r\n"
+    "c=IN IP4 0.0.0.0\r\n"
+    "a=rtcp:9 IN IP4 0.0.0.0\r\n"
     "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n"
     "a=mid:video_content_name\r\n"
     "a=sendrecv\r\n"
@@ -1596,7 +1596,7 @@
   // TODO(pthatcher): We need to temporarily allow the SDP to control
   // this for backwards-compatibility.  Once we don't need that any
   // more, remove this.
-  InjectAfter("m=application 9 RTP/SAVPF 101\r\nc=IN IP6 ::\r\n",
+  InjectAfter("m=application 9 RTP/SAVPF 101\r\nc=IN IP4 0.0.0.0\r\n",
               "b=AS:100\r\n",
               &expected_sdp);
   EXPECT_EQ(expected_sdp, message);
diff --git a/examples/android/AndroidManifest.xml b/examples/android/AndroidManifest.xml
index 30fd46c..e24942a 100644
--- a/examples/android/AndroidManifest.xml
+++ b/examples/android/AndroidManifest.xml
@@ -7,7 +7,7 @@
     <uses-feature android:name="android.hardware.camera" />
     <uses-feature android:name="android.hardware.camera.autofocus" />
     <uses-feature android:glEsVersion="0x00020000" android:required="true" />
-    <uses-sdk android:minSdkVersion="13" android:targetSdkVersion="19" />
+    <uses-sdk android:minSdkVersion="13" android:targetSdkVersion="21" />
 
     <uses-permission android:name="android.permission.CAMERA" />
     <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
@@ -43,7 +43,7 @@
                   android:label="@string/app_name"
                   android:screenOrientation="fullUser"
                   android:configChanges="orientation|screenSize"
-                  android:theme="@android:style/Theme.Black.NoTitleBar.Fullscreen">
+                  android:theme="@style/AppRTCDemoActivityTheme">
         </activity>
     </application>
 </manifest>
diff --git a/examples/android/res/values-v21/styles.xml b/examples/android/res/values-v21/styles.xml
new file mode 100644
index 0000000..95f1ac6
--- /dev/null
+++ b/examples/android/res/values-v21/styles.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+  <style name="AppRTCDemoActivityTheme" parent="android:Theme.Material">
+    <item name="android:windowActionBar">false</item>
+    <item name="android:windowFullscreen">true</item>
+    <item name="android:windowNoTitle">true</item>
+  </style>
+</resources>
diff --git a/examples/android/res/values/styles.xml b/examples/android/res/values/styles.xml
new file mode 100644
index 0000000..7f809a6
--- /dev/null
+++ b/examples/android/res/values/styles.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+  <style name="AppRTCDemoActivityTheme" parent="android:Theme.Black">
+    <item name="android:windowActionBar">false</item>
+    <item name="android:windowFullscreen">true</item>
+    <item name="android:windowNoTitle">true</item>
+  </style>
+</resources>
diff --git a/examples/android/src/org/appspot/apprtc/AppRTCAudioManager.java b/examples/android/src/org/appspot/apprtc/AppRTCAudioManager.java
new file mode 100644
index 0000000..b2a1a44
--- /dev/null
+++ b/examples/android/src/org/appspot/apprtc/AppRTCAudioManager.java
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2013, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.appspot.apprtc;
+
+import android.content.Context;
+import android.media.AudioManager;
+import android.util.Log;
+
+/**
+ * AppRTCAudioManager manages all audio related parts of the AppRTC demo.
+ * TODO(henrika): add support for device enumeration, device selection etc.
+ */
+public class AppRTCAudioManager {
+  private static final String TAG = "AppRTCAudioManager";
+
+  private boolean initialized = false;
+  private AudioManager audioManager;
+  private int savedAudioMode = AudioManager.MODE_INVALID;
+  private boolean savedIsSpeakerPhoneOn = false;
+  private boolean savedIsMicrophoneMute = false;
+
+  /** Construction */
+  static AppRTCAudioManager create(Context context) {
+    return new AppRTCAudioManager(context);
+  }
+
+  private AppRTCAudioManager(Context context) {
+    Log.d(TAG, "AppRTCAudioManager");
+    audioManager = ((AudioManager) context.getSystemService(
+        Context.AUDIO_SERVICE));
+  }
+
+  public void init() {
+    Log.d(TAG, "init");
+    if (initialized) {
+      return;
+    }
+
+    // Store current audio state so we can restore it when close() is called.
+    savedAudioMode = audioManager.getMode();
+    savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn();
+    savedIsMicrophoneMute = audioManager.isMicrophoneMute();
+
+    // The AppRTC demo shall always run in COMMUNICATION mode since it will
+    // result in best possible "VoIP settings", like audio routing, volume
+    // control etc.
+    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
+
+    initialized = true;
+  }
+
+  public void close() {
+    Log.d(TAG, "close");
+    if (!initialized) {
+      return;
+    }
+
+    // Restore previously stored audio states.
+    setSpeakerphoneOn(savedIsSpeakerPhoneOn);
+    setMicrophoneMute(savedIsMicrophoneMute);
+    audioManager.setMode(savedAudioMode);
+
+    initialized = false;
+  }
+
+  /** Sets the speaker phone mode. */
+  private void setSpeakerphoneOn(boolean on) {
+    boolean wasOn = audioManager.isSpeakerphoneOn();
+    if (wasOn == on) {
+      return;
+    }
+    audioManager.setSpeakerphoneOn(on);
+  }
+
+  /** Sets the microphone mute state. */
+  private void setMicrophoneMute(boolean on) {
+    boolean wasMuted = audioManager.isMicrophoneMute();
+    if (wasMuted == on) {
+      return;
+    }
+    audioManager.setMicrophoneMute(on);
+  }
+}
diff --git a/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java b/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java
index ad0e2d5..3ad26af 100644
--- a/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java
+++ b/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java
@@ -34,7 +34,6 @@
 import android.content.Intent;
 import android.content.pm.ActivityInfo;
 import android.graphics.Color;
-import android.media.AudioManager;
 import android.net.Uri;
 import android.opengl.GLSurfaceView;
 import android.os.Bundle;
@@ -72,6 +71,7 @@
   private PeerConnectionClient pc;
   private AppRTCClient appRtcClient = new GAERTCClient(this, this);
   private AppRTCSignalingParameters appRtcParameters;
+  private AppRTCAudioManager audioManager = null;
   private View rootView;
   private View menuBar;
   private GLSurfaceView videoView;
@@ -187,15 +187,9 @@
     hudView.setVisibility(View.INVISIBLE);
     addContentView(hudView, hudLayout);
 
-    AudioManager audioManager =
-        ((AudioManager) getSystemService(AUDIO_SERVICE));
-    // TODO(fischman): figure out how to do this Right(tm) and remove the
-    // suppression.
-    @SuppressWarnings("deprecation")
-    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
-    audioManager.setMode(isWiredHeadsetOn ?
-        AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
-    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);
+    // Create an audio manager that will take care of audio routing,
+    // audio modes, audio device enumeration etc.
+    audioManager = AppRTCAudioManager.create(this);
 
     final Intent intent = getIntent();
     Uri url = intent.getData();
@@ -253,6 +247,10 @@
   @Override
   protected void onDestroy() {
     disconnect();
+    if (audioManager != null) {
+      audioManager.close();
+      audioManager = null;
+    }
     super.onDestroy();
   }
 
@@ -360,6 +358,12 @@
   // All events are called from UI thread.
   @Override
   public void onConnectedToRoom(final AppRTCSignalingParameters params) {
+    if (audioManager != null) {
+      // Store existing audio settings and change audio mode to
+      // MODE_IN_COMMUNICATION for best possible VoIP performance.
+      logAndToast("Initializing the audio manager...");
+      audioManager.init();
+    }
     appRtcParameters = params;
     abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
       this, true, true, VideoRendererGui.getEGLContext()),
diff --git a/examples/objc/AppRTCDemo/ios/APPRTCViewController.m b/examples/objc/AppRTCDemo/ios/APPRTCViewController.m
index 8042762..d8d9714 100644
--- a/examples/objc/AppRTCDemo/ios/APPRTCViewController.m
+++ b/examples/objc/AppRTCDemo/ios/APPRTCViewController.m
@@ -34,6 +34,7 @@
 #import <AVFoundation/AVFoundation.h>
 #import "APPRTCConnectionManager.h"
 #import "RTCEAGLVideoView.h"
+#import "RTCVideoTrack.h"
 
 // Padding space for local video view with its parent.
 static CGFloat const kLocalViewPadding = 20;
@@ -47,6 +48,8 @@
 
 @implementation APPRTCViewController {
   APPRTCConnectionManager* _connectionManager;
+  RTCVideoTrack* _localVideoTrack;
+  RTCVideoTrack* _remoteVideoTrack;
   CGSize _localVideoSize;
   CGSize _remoteVideoSize;
 }
@@ -101,13 +104,15 @@
 
 - (void)connectionManager:(APPRTCConnectionManager*)manager
     didReceiveLocalVideoTrack:(RTCVideoTrack*)localVideoTrack {
+  _localVideoTrack = localVideoTrack;
+  [_localVideoTrack addRenderer:self.localVideoView];
   self.localVideoView.hidden = NO;
-  self.localVideoView.videoTrack = localVideoTrack;
 }
 
 - (void)connectionManager:(APPRTCConnectionManager*)manager
     didReceiveRemoteVideoTrack:(RTCVideoTrack*)remoteVideoTrack {
-  self.remoteVideoView.videoTrack = remoteVideoTrack;
+  _remoteVideoTrack = remoteVideoTrack;
+  [_remoteVideoTrack addRenderer:self.remoteVideoView];
 }
 
 - (void)connectionManagerDidReceiveHangup:(APPRTCConnectionManager*)manager {
@@ -193,8 +198,16 @@
   self.instructionsView.hidden = NO;
   self.logView.hidden = YES;
   self.logView.text = nil;
-  self.localVideoView.videoTrack = nil;
-  self.remoteVideoView.videoTrack = nil;
+  if (_localVideoTrack) {
+    [_localVideoTrack removeRenderer:self.localVideoView];
+    _localVideoTrack = nil;
+    [self.localVideoView renderFrame:nil];
+  }
+  if (_remoteVideoTrack) {
+    [_remoteVideoTrack removeRenderer:self.remoteVideoView];
+    _remoteVideoTrack = nil;
+    [self.remoteVideoView renderFrame:nil];
+  }
   self.blackView.hidden = YES;
 }
 
diff --git a/examples/objc/AppRTCDemo/mac/APPRTCViewController.m b/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
index cf5b836..08acac9 100644
--- a/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
+++ b/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
@@ -30,6 +30,7 @@
 #import <AVFoundation/AVFoundation.h>
 #import "APPRTCConnectionManager.h"
 #import "RTCNSGLVideoView.h"
+#import "RTCVideoTrack.h"
 
 static NSUInteger const kContentWidth = 1280;
 static NSUInteger const kContentHeight = 720;
@@ -227,6 +228,8 @@
 
 @implementation APPRTCViewController {
   APPRTCConnectionManager* _connectionManager;
+  RTCVideoTrack* _localVideoTrack;
+  RTCVideoTrack* _remoteVideoTrack;
 }
 
 - (instancetype)initWithNibName:(NSString*)nibName
@@ -258,12 +261,13 @@
 
 - (void)connectionManager:(APPRTCConnectionManager*)manager
     didReceiveLocalVideoTrack:(RTCVideoTrack*)localVideoTrack {
-  self.mainView.localVideoView.videoTrack = localVideoTrack;
+  _localVideoTrack = localVideoTrack;
 }
 
 - (void)connectionManager:(APPRTCConnectionManager*)manager
     didReceiveRemoteVideoTrack:(RTCVideoTrack*)remoteVideoTrack {
-  self.mainView.remoteVideoView.videoTrack = remoteVideoTrack;
+  _remoteVideoTrack = remoteVideoTrack;
+  [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
 }
 
 - (void)connectionManagerDidReceiveHangup:(APPRTCConnectionManager*)manager {
@@ -305,7 +309,9 @@
 }
 
 - (void)disconnect {
-  self.mainView.remoteVideoView.videoTrack = nil;
+  [_remoteVideoTrack removeRenderer:self.mainView.remoteVideoView];
+  _remoteVideoTrack = nil;
+  [self.mainView.remoteVideoView renderFrame:nil];
   [_connectionManager disconnect];
 }
 
diff --git a/libjingle.gyp b/libjingle.gyp
index 803eaa3..90d1f41 100755
--- a/libjingle.gyp
+++ b/libjingle.gyp
@@ -204,8 +204,8 @@
             'app/webrtc/objc/RTCStatsReport.mm',
             'app/webrtc/objc/RTCVideoCapturer+Internal.h',
             'app/webrtc/objc/RTCVideoCapturer.mm',
-            'app/webrtc/objc/RTCVideoRenderer+Internal.h',
-            'app/webrtc/objc/RTCVideoRenderer.mm',
+            'app/webrtc/objc/RTCVideoRendererAdapter.h',
+            'app/webrtc/objc/RTCVideoRendererAdapter.mm',
             'app/webrtc/objc/RTCVideoSource+Internal.h',
             'app/webrtc/objc/RTCVideoSource.mm',
             'app/webrtc/objc/RTCVideoTrack+Internal.h',
@@ -259,7 +259,6 @@
           'conditions': [
             ['OS=="ios"', {
               'sources': [
-                'app/webrtc/objc/RTCEAGLVideoView+Internal.h',
                 'app/webrtc/objc/RTCEAGLVideoView.m',
                 'app/webrtc/objc/public/RTCEAGLVideoView.h',
               ],
diff --git a/libjingle_examples.gyp b/libjingle_examples.gyp
index d0f1747..f7ce53b 100755
--- a/libjingle_examples.gyp
+++ b/libjingle_examples.gyp
@@ -344,6 +344,7 @@
                 'examples/android/res/values/arrays.xml',
                 'examples/android/res/values/strings.xml',
                 'examples/android/res/xml/preferences.xml',
+                'examples/android/src/org/appspot/apprtc/AppRTCAudioManager.java',
                 'examples/android/src/org/appspot/apprtc/AppRTCClient.java',
                 'examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java',
                 'examples/android/src/org/appspot/apprtc/ConnectActivity.java',
diff --git a/media/base/fakemediaengine.h b/media/base/fakemediaengine.h
index db5e2e4..ff6ae16 100644
--- a/media/base/fakemediaengine.h
+++ b/media/base/fakemediaengine.h
@@ -879,9 +879,6 @@
     default_encoder_config_ = config;
     return true;
   }
-  VideoEncoderConfig GetDefaultEncoderConfig() const {
-    return default_encoder_config_;
-  }
   const VideoEncoderConfig& default_encoder_config() const {
     return default_encoder_config_;
   }
diff --git a/media/base/filemediaengine.h b/media/base/filemediaengine.h
index 9cc066d..ba29ab3 100644
--- a/media/base/filemediaengine.h
+++ b/media/base/filemediaengine.h
@@ -93,9 +93,6 @@
   virtual bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config) {
     return true;
   }
-  virtual VideoEncoderConfig GetDefaultVideoEncoderConfig() const {
-    return VideoEncoderConfig();
-  }
   virtual bool SetSoundDevices(const Device* in_dev, const Device* out_dev) {
     return true;
   }
diff --git a/media/base/mediaengine.h b/media/base/mediaengine.h
index e7222f2..b8d661c 100644
--- a/media/base/mediaengine.h
+++ b/media/base/mediaengine.h
@@ -99,10 +99,6 @@
   // and encode video.
   virtual bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config)
       = 0;
-  // Gets the default (maximum) codec/resolution and encoder option used to
-  // capture and encode video, as set by SetDefaultVideoEncoderConfig or the
-  // default from the video engine if not previously set.
-  virtual VideoEncoderConfig GetDefaultVideoEncoderConfig() const = 0;
 
   // Device selection
   // TODO(tschmelcher): Add method for selecting the soundclip device.
@@ -219,9 +215,6 @@
   virtual bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config) {
     return video_.SetDefaultEncoderConfig(config);
   }
-  virtual VideoEncoderConfig GetDefaultVideoEncoderConfig() const {
-    return video_.GetDefaultEncoderConfig();
-  }
 
   virtual bool SetSoundDevices(const Device* in_device,
                                const Device* out_device) {
@@ -347,9 +340,6 @@
     return NULL;
   }
   bool SetOptions(const VideoOptions& options) { return true; }
-  VideoEncoderConfig GetDefaultEncoderConfig() const {
-    return VideoEncoderConfig();
-  }
   bool SetDefaultEncoderConfig(const VideoEncoderConfig& config) {
     return true;
   }
diff --git a/media/webrtc/constants.h b/media/webrtc/constants.h
index 5390c0d..821a41f 100755
--- a/media/webrtc/constants.h
+++ b/media/webrtc/constants.h
@@ -35,7 +35,9 @@
 extern const char kVp8CodecName[];
 extern const char kH264CodecName[];
 
-extern const int kDefaultFramerate;
+extern const int kDefaultVideoMaxWidth;
+extern const int kDefaultVideoMaxHeight;
+extern const int kDefaultVideoMaxFramerate;
 extern const int kMinVideoBitrate;
 extern const int kStartVideoBitrate;
 extern const int kMaxVideoBitrate;
diff --git a/media/webrtc/webrtcmediaengine.h b/media/webrtc/webrtcmediaengine.h
index bfcfb2a..b4436cd 100644
--- a/media/webrtc/webrtcmediaengine.h
+++ b/media/webrtc/webrtcmediaengine.h
@@ -130,9 +130,6 @@
       const VideoEncoderConfig& config) OVERRIDE {
     return delegate_->SetDefaultVideoEncoderConfig(config);
   }
-  virtual VideoEncoderConfig GetDefaultVideoEncoderConfig() const OVERRIDE {
-    return delegate_->GetDefaultVideoEncoderConfig();
-  }
   virtual bool SetSoundDevices(
       const Device* in_device, const Device* out_device) OVERRIDE {
     return delegate_->SetSoundDevices(in_device, out_device);
diff --git a/media/webrtc/webrtcvideoengine.cc b/media/webrtc/webrtcvideoengine.cc
index 04092f3..46d316f 100644
--- a/media/webrtc/webrtcvideoengine.cc
+++ b/media/webrtc/webrtcvideoengine.cc
@@ -55,6 +55,7 @@
 #include "webrtc/base/basictypes.h"
 #include "webrtc/base/buffer.h"
 #include "webrtc/base/byteorder.h"
+#include "webrtc/base/checks.h"
 #include "webrtc/base/common.h"
 #include "webrtc/base/cpumonitor.h"
 #include "webrtc/base/logging.h"
@@ -106,7 +107,10 @@
 
 const char kVp8CodecName[] = "VP8";
 
-const int kDefaultFramerate = 30;
+// TODO(ronghuawu): Change to 640x360.
+const int kDefaultVideoMaxWidth = 640;
+const int kDefaultVideoMaxHeight = 400;
+const int kDefaultVideoMaxFramerate = 30;
 const int kMinVideoBitrate = 30;
 const int kStartVideoBitrate = 300;
 const int kMaxVideoBitrate = 2000;
@@ -176,15 +180,57 @@
 static const rtc::DiffServCodePoint kVideoDscpValue =
     rtc::DSCP_AF41;
 
-static bool IsNackEnabled(const VideoCodec& codec) {
-  return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamNack,
-                                              kParamValueEmpty));
+bool IsNackEnabled(const VideoCodec& codec) {
+  return codec.HasFeedbackParam(
+      FeedbackParam(kRtcpFbParamNack, kParamValueEmpty));
 }
 
-// Returns true if Receiver Estimated Max Bitrate is enabled.
-static bool IsRembEnabled(const VideoCodec& codec) {
-  return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamRemb,
-                                              kParamValueEmpty));
+bool IsRembEnabled(const VideoCodec& codec) {
+  return codec.HasFeedbackParam(
+      FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty));
+}
+
+void AddDefaultFeedbackParams(VideoCodec* codec) {
+  codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamCcm, kRtcpFbCcmParamFir));
+  codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kParamValueEmpty));
+  codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamNack, kRtcpFbNackParamPli));
+  codec->AddFeedbackParam(FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty));
+}
+
+bool CodecNameMatches(const std::string& name1, const std::string& name2) {
+  return _stricmp(name1.c_str(), name2.c_str()) == 0;
+}
+
+static VideoCodec MakeVideoCodecWithDefaultFeedbackParams(int payload_type,
+                                                          const char* name) {
+  VideoCodec codec(payload_type, name, kDefaultVideoMaxWidth,
+                   kDefaultVideoMaxHeight, kDefaultVideoMaxFramerate, 0);
+  AddDefaultFeedbackParams(&codec);
+  return codec;
+}
+
+static VideoCodec MakeVideoCodec(int payload_type, const char* name) {
+  return VideoCodec(payload_type, name, 0, 0, 0, 0);
+}
+
+static VideoCodec MakeRtxCodec(int payload_type, int associated_payload_type) {
+  return VideoCodec::CreateRtxCodec(payload_type, associated_payload_type);
+}
+
+bool CodecIsInternallySupported(const std::string& codec_name) {
+  if (CodecNameMatches(codec_name, kVp8CodecName)) {
+    return true;
+  }
+  return false;
+}
+
+std::vector<VideoCodec> DefaultVideoCodecList() {
+  std::vector<VideoCodec> codecs;
+  codecs.push_back(MakeVideoCodecWithDefaultFeedbackParams(100, kVp8CodecName));
+  codecs.push_back(MakeRtxCodec(96, 100));
+  codecs.push_back(MakeVideoCodec(116, kRedCodecName));
+  codecs.push_back(MakeVideoCodec(117, kUlpfecCodecName));
+  return codecs;
 }
 
 struct FlushBlackFrameData : public rtc::MessageData {
@@ -923,18 +969,6 @@
   AdaptFormatType adapt_format_type_;
 };
 
-const WebRtcVideoEngine::VideoCodecPref
-    WebRtcVideoEngine::kVideoCodecPrefs[] = {
-    {kVp8CodecName, 100, -1, 0},
-    {kRedCodecName, 116, -1, 1},
-    {kUlpfecCodecName, 117, -1, 2},
-    {kRtxCodecName, 96, 100, 3},
-};
-
-const VideoFormatPod WebRtcVideoEngine::kDefaultMaxVideoFormat =
-  {640, 400, FPS_TO_INTERVAL(30), FOURCC_ANY};
-// TODO(ronghuawu): Change to 640x360.
-
 static bool GetCpuOveruseOptions(const VideoOptions& options,
                                  webrtc::CpuOveruseOptions* overuse_options) {
   int underuse_threshold = 0;
@@ -1018,20 +1052,14 @@
     LOG_RTCERR1(SetTraceCallback, this);
   }
 
+  default_video_codec_list_ = DefaultVideoCodecList();
+
   // Set default quality levels for our supported codecs. We override them here
   // if we know your cpu performance is low, and they can be updated explicitly
   // by calling SetDefaultCodec.  For example by a flute preference setting, or
   // by the server with a jec in response to our reported system info.
-  VideoCodec max_codec(kVideoCodecPrefs[0].payload_type,
-                       kVideoCodecPrefs[0].name,
-                       kDefaultMaxVideoFormat.width,
-                       kDefaultMaxVideoFormat.height,
-                       VideoFormat::IntervalToFps(
-                           kDefaultMaxVideoFormat.interval),
-                       0);
-  if (!SetDefaultCodec(max_codec)) {
-    LOG(LS_ERROR) << "Failed to initialize list of supported codec types";
-  }
+  CHECK(SetDefaultCodec(default_video_codec_list_.front()))
+      << "Failed to initialize list of supported codec types.";
 
   // Consider jitter, packet loss, etc when rendering.  This will
   // theoretically make rendering more smooth.
@@ -1144,17 +1172,6 @@
   return SetDefaultCodec(config.max_codec);
 }
 
-VideoEncoderConfig WebRtcVideoEngine::GetDefaultEncoderConfig() const {
-  ASSERT(!video_codecs_.empty());
-  VideoCodec max_codec(kVideoCodecPrefs[0].payload_type,
-                       kVideoCodecPrefs[0].name,
-                       video_codecs_[0].width,
-                       video_codecs_[0].height,
-                       video_codecs_[0].framerate,
-                       0);
-  return VideoEncoderConfig(max_codec);
-}
-
 // SetDefaultCodec may be called while the capturer is running. For example, a
 // test call is started in a page with QVGA default codec, and then a real call
 // is started in another page with VGA default codec. This is the corner case
@@ -1225,13 +1242,12 @@
         return true;
     }
   }
-  for (size_t j = 0; j < ARRAY_SIZE(kVideoCodecPrefs); ++j) {
-    VideoCodec codec(kVideoCodecPrefs[j].payload_type,
-                     kVideoCodecPrefs[j].name, 0, 0, 0, 0);
-    if (codec.Matches(in)) {
+  for (size_t j = 0; j != default_video_codec_list_.size(); ++j) {
+    if (default_video_codec_list_[j].Matches(in)) {
       return true;
     }
   }
+
   return false;
 }
 
@@ -1439,17 +1455,6 @@
   }
 }
 
-static void AddDefaultFeedbackParams(VideoCodec* codec) {
-  const FeedbackParam kFir(kRtcpFbParamCcm, kRtcpFbCcmParamFir);
-  codec->AddFeedbackParam(kFir);
-  const FeedbackParam kNack(kRtcpFbParamNack, kParamValueEmpty);
-  codec->AddFeedbackParam(kNack);
-  const FeedbackParam kPli(kRtcpFbParamNack, kRtcpFbNackParamPli);
-  codec->AddFeedbackParam(kPli);
-  const FeedbackParam kRemb(kRtcpFbParamRemb, kParamValueEmpty);
-  codec->AddFeedbackParam(kRemb);
-}
-
 // Rebuilds the codec list to be only those that are less intensive
 // than the specified codec. Prefers internal codec over external with
 // higher preference field.
@@ -1459,27 +1464,17 @@
 
   video_codecs_.clear();
 
-  bool found = false;
   std::set<std::string> internal_codec_names;
-  for (size_t i = 0; i < ARRAY_SIZE(kVideoCodecPrefs); ++i) {
-    const VideoCodecPref& pref(kVideoCodecPrefs[i]);
-    if (!found)
-      found = (in_codec.name == pref.name);
-    if (found) {
-      VideoCodec codec(pref.payload_type, pref.name,
-                       in_codec.width, in_codec.height, in_codec.framerate,
-                       static_cast<int>(ARRAY_SIZE(kVideoCodecPrefs) - i));
-      if (_stricmp(kVp8CodecName, codec.name.c_str()) == 0) {
-        AddDefaultFeedbackParams(&codec);
-      }
-      if (pref.associated_payload_type != -1) {
-        codec.SetParam(kCodecParamAssociatedPayloadType,
-                       pref.associated_payload_type);
-      }
-      video_codecs_.push_back(codec);
-      internal_codec_names.insert(codec.name);
-    }
+  for (size_t i = 0; i != default_video_codec_list_.size(); ++i) {
+    VideoCodec codec = default_video_codec_list_[i];
+    codec.width = in_codec.width;
+    codec.height = in_codec.height;
+    codec.framerate = in_codec.framerate;
+    video_codecs_.push_back(codec);
+
+    internal_codec_names.insert(codec.name);
   }
+
   if (encoder_factory_) {
     const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs =
         encoder_factory_->codecs();
@@ -1487,8 +1482,6 @@
       bool is_internal_codec = internal_codec_names.find(codecs[i].name) !=
           internal_codec_names.end();
       if (!is_internal_codec) {
-        if (!found)
-          found = (in_codec.name == codecs[i].name);
         VideoCodec codec(
             GetExternalVideoPayloadType(static_cast<int>(i)),
             codecs[i].name,
@@ -1503,7 +1496,6 @@
       }
     }
   }
-  ASSERT(found);
   return true;
 }
 
@@ -1609,12 +1601,10 @@
   encoder_factory_ = encoder_factory;
 
   // Rebuild codec list while reapplying the current default codec format.
-  VideoCodec max_codec(kVideoCodecPrefs[0].payload_type,
-                       kVideoCodecPrefs[0].name,
-                       video_codecs_[0].width,
-                       video_codecs_[0].height,
-                       video_codecs_[0].framerate,
-                       0);
+  VideoCodec max_codec = default_video_codec_list_[0];
+  max_codec.width = video_codecs_[0].width;
+  max_codec.height = video_codecs_[0].height;
+  max_codec.framerate = video_codecs_[0].framerate;
   if (!RebuildCodecList(max_codec)) {
     LOG(LS_ERROR) << "Failed to initialize list of supported codec types";
   }
@@ -1661,10 +1651,12 @@
 }
 
 WebRtcVideoMediaChannel::~WebRtcVideoMediaChannel() {
-  const bool send = false;
-  SetSend(send);
-  const bool render = false;
-  SetRender(render);
+  Terminate();
+}
+
+void WebRtcVideoMediaChannel::Terminate() {
+  SetSend(false);
+  SetRender(false);
 
   while (!send_channels_.empty()) {
     if (!DeleteSendChannel(send_channels_.begin()->first)) {
@@ -1987,14 +1979,21 @@
     SetReceiverReportSsrc(sp.first_ssrc());
   }
 
-  send_channel->set_stream_params(sp);
-
-  // Reset send codec after stream parameters changed.
   if (send_codec_) {
-    if (!SetSendCodec(send_channel, *send_codec_)) {
+    send_channel->SetAdaptFormat(
+        VideoFormatFromVieCodec(*send_codec_),
+        WebRtcVideoChannelSendInfo::kAdaptFormatTypeCodec);
+
+    VideoSendParams send_params;
+    send_params.codec = *send_codec_;
+    send_params.stream = sp;
+    if (!SetSendParams(send_channel, send_params)) {
       return false;
     }
-    LogSendCodecChange("SetSendStreamFormat()");
+    LogSendCodecChange("AddStream()");
+  } else {
+    // Save the stream params for later, when we have a codec.
+    send_channel->set_stream_params(sp);
   }
 
   if (sending_) {
@@ -2990,39 +2989,40 @@
   VideoOptions original = options_;
   options_.SetAll(options);
 
-  // Set CPU options for all send channels.
+  // Set CPU options and codec options for all send channels.
   for (SendChannelMap::iterator iter = send_channels_.begin();
        iter != send_channels_.end(); ++iter) {
     WebRtcVideoChannelSendInfo* send_channel = iter->second;
     send_channel->ApplyCpuOptions(options_);
+
+    if (send_codec_) {
+      VideoSendParams send_params = send_channel->send_params();
+
+      bool conference_mode_turned_off = (
+          original.conference_mode.IsSet() &&
+          options.conference_mode.IsSet() &&
+          original.conference_mode.GetWithDefaultIfUnset(false) &&
+          !options.conference_mode.GetWithDefaultIfUnset(false));
+      if (conference_mode_turned_off) {
+        // This is a special case for turning conference mode off.
+        // Max bitrate should go back to the default maximum value instead
+        // of the current maximum.
+        send_params.codec.maxBitrate = kAutoBandwidth;
+      }
+
+      // TODO(pthatcher): Remove this.  We don't need 4 ways to set bitrates.
+      int new_start_bitrate;
+      if (options.video_start_bitrate.Get(&new_start_bitrate)) {
+        send_params.codec.startBitrate = new_start_bitrate;
+      }
+
+      if (!SetSendParams(send_channel, send_params)) {
+        return false;
+      }
+      LogSendCodecChange("SetOptions()");
+    }
   }
 
-  if (send_codec_) {
-    webrtc::VideoCodec new_codec = *send_codec_;
-
-    bool conference_mode_turned_off = (
-        original.conference_mode.IsSet() &&
-        options.conference_mode.IsSet() &&
-        original.conference_mode.GetWithDefaultIfUnset(false) &&
-        !options.conference_mode.GetWithDefaultIfUnset(false));
-    if (conference_mode_turned_off) {
-      // This is a special case for turning conference mode off.
-      // Max bitrate should go back to the default maximum value instead
-      // of the current maximum.
-      new_codec.maxBitrate = kAutoBandwidth;
-    }
-
-    // TODO(pthatcher): Remove this.  We don't need 4 ways to set bitrates.
-    int new_start_bitrate;
-    if (options.video_start_bitrate.Get(&new_start_bitrate)) {
-      new_codec.startBitrate = new_start_bitrate;
-    }
-
-    if (!SetSendCodec(new_codec)) {
-      return false;
-    }
-    LogSendCodecChange("SetOptions()");
-  }
 
   int buffer_latency;
   if (Changed(options.buffered_mode_latency,
@@ -3676,26 +3676,9 @@
       VideoFormatFromVieCodec(codec),
       WebRtcVideoChannelSendInfo::kAdaptFormatTypeCodec);
 
-  MaybeRegisterExternalEncoder(send_channel, codec);
-
   VideoSendParams send_params = send_channel->send_params();
   send_params.codec = codec;
-  if (!SetSendParams(send_channel, send_params)) {
-    return false;
-  }
-
-  // NOTE: SetRtxSendPayloadType must be called after all simulcast SSRCs
-  // are configured. Otherwise ssrc's configured after this point will use
-  // the primary PT for RTX.
-  const int channel_id = send_channel->channel_id();
-  if (send_rtx_type_ != -1 &&
-      engine()->vie()->rtp()->SetRtxSendPayloadType(channel_id,
-                                                    send_rtx_type_) != 0) {
-    LOG_RTCERR2(SetRtxSendPayloadType, channel_id, send_rtx_type_);
-    return false;
-  }
-
-  return true;
+  return SetSendParams(send_channel, send_params);
 }
 
 static std::string ToString(webrtc::VideoCodecComplexity complexity) {
@@ -3872,6 +3855,8 @@
     const VideoSendParams& send_params) {
   const int channel_id = send_channel->channel_id();
 
+  MaybeRegisterExternalEncoder(send_channel, send_params.codec);
+
   CapturedFrameInfo frame;
   send_channel->last_captured_frame_info().Get(&frame);
 
@@ -3923,10 +3908,18 @@
   }
   engine()->vie()->rtp()->SetTransmissionSmoothingStatus(channel_id, true);
 
-  if (send_channel->IsActive()) {
-    if (!SetSendSsrcs(channel_id, send_params.stream, codec)) {
-      return false;
-    }
+  if (!SetSendSsrcs(channel_id, send_params.stream, codec)) {
+    return false;
+  }
+
+  // NOTE: SetRtxSendPayloadType must be called after all SSRCs are
+  // configured. Otherwise SSRCs configured after this point will use
+  // the primary PT for RTX.
+  if (send_rtx_type_ != -1 &&
+      engine()->vie()->rtp()->SetRtxSendPayloadType(channel_id,
+                                                    send_rtx_type_) != 0) {
+    LOG_RTCERR2(SetRtxSendPayloadType, channel_id, send_rtx_type_);
+    return false;
   }
 
   send_channel->set_send_params(send_params);
diff --git a/media/webrtc/webrtcvideoengine.h b/media/webrtc/webrtcvideoengine.h
index cc81ee9..db091af 100644
--- a/media/webrtc/webrtcvideoengine.h
+++ b/media/webrtc/webrtcvideoengine.h
@@ -85,6 +85,15 @@
 struct CapturedFrame;
 struct Device;
 
+// This set of methods is declared here for the sole purpose of sharing code
+// between webrtc video engine v1 and v2.
+std::vector<VideoCodec> DefaultVideoCodecList();
+bool CodecNameMatches(const std::string& name1, const std::string& name2);
+bool CodecIsInternallySupported(const std::string& codec_name);
+bool IsNackEnabled(const VideoCodec& codec);
+bool IsRembEnabled(const VideoCodec& codec);
+void AddDefaultFeedbackParams(VideoCodec* codec);
+
 class WebRtcVideoEngine : public sigslot::has_slots<>,
                           public webrtc::TraceCallback {
  public:
@@ -108,7 +117,6 @@
 
   int GetCapabilities();
   bool SetDefaultEncoderConfig(const VideoEncoderConfig& config);
-  VideoEncoderConfig GetDefaultEncoderConfig() const;
 
   // TODO(pbos): Remove when all call sites use VideoOptions.
   virtual WebRtcVideoMediaChannel* CreateChannel(
@@ -177,6 +185,10 @@
   rtc::CpuMonitor* cpu_monitor() { return cpu_monitor_.get(); }
 
  protected:
+  bool initialized() const {
+    return initialized_;
+  }
+
   // When a video processor registers with the engine.
   // SignalMediaFrame will be invoked for every video frame.
   // See videoprocessor.h for param reference.
@@ -184,18 +196,6 @@
 
  private:
   typedef std::vector<WebRtcVideoMediaChannel*> VideoChannels;
-  struct VideoCodecPref {
-    const char* name;
-    int payload_type;
-    // For RTX, this field is the payload-type that RTX applies to.
-    // For other codecs, it should be set to -1.
-    int associated_payload_type;
-    int pref;
-  };
-
-  static const VideoCodecPref kVideoCodecPrefs[];
-  static const VideoFormatPod kVideoFormats[];
-  static const VideoFormatPod kDefaultMaxVideoFormat;
 
   void Construct(ViEWrapper* vie_wrapper,
                  ViETraceWrapper* tracing,
@@ -222,6 +222,7 @@
   WebRtcVideoEncoderFactory* encoder_factory_;
   WebRtcVideoDecoderFactory* decoder_factory_;
   std::vector<VideoCodec> video_codecs_;
+  std::vector<VideoCodec> default_video_codec_list_;
   std::vector<RtpHeaderExtension> rtp_header_extensions_;
   VideoFormat default_codec_format_;
 
@@ -326,6 +327,7 @@
   virtual void OnMessage(rtc::Message* msg) OVERRIDE;
 
  protected:
+  void Terminate();
   int GetLastEngineError() { return engine()->GetLastEngineError(); }
 
   // webrtc::Transport:
diff --git a/media/webrtc/webrtcvideoengine2.cc b/media/webrtc/webrtcvideoengine2.cc
index d79f71d..5b5f12e 100644
--- a/media/webrtc/webrtcvideoengine2.cc
+++ b/media/webrtc/webrtcvideoengine2.cc
@@ -36,6 +36,7 @@
 #include "talk/media/base/videorenderer.h"
 #include "talk/media/webrtc/constants.h"
 #include "talk/media/webrtc/webrtcvideocapturer.h"
+#include "talk/media/webrtc/webrtcvideoengine.h"
 #include "talk/media/webrtc/webrtcvideoframe.h"
 #include "talk/media/webrtc/webrtcvoiceengine.h"
 #include "webrtc/base/buffer.h"
@@ -51,26 +52,6 @@
 
 namespace cricket {
 namespace {
-
-static bool CodecNameMatches(const std::string& name1,
-                             const std::string& name2) {
-  return _stricmp(name1.c_str(), name2.c_str()) == 0;
-}
-
-const char* kInternallySupportedCodecs[] = {
-    kVp8CodecName,
-};
-
-// True if codec is supported by a software implementation that's always
-// available.
-static bool CodecIsInternallySupported(const std::string& codec_name) {
-  for (size_t i = 0; i < ARRAY_SIZE(kInternallySupportedCodecs); ++i) {
-    if (CodecNameMatches(codec_name, kInternallySupportedCodecs[i]))
-      return true;
-  }
-  return false;
-}
-
 static std::string CodecVectorToString(const std::vector<VideoCodec>& codecs) {
   std::stringstream out;
   out << '{';
@@ -116,6 +97,29 @@
   return out.str();
 }
 
+// Merges two fec configs and logs an error if a conflict arises
+// such that merging in different order would trigger a different output.
+static void MergeFecConfig(const webrtc::FecConfig& other,
+                           webrtc::FecConfig* output) {
+  if (other.ulpfec_payload_type != -1) {
+    if (output->ulpfec_payload_type != -1 &&
+        output->ulpfec_payload_type != other.ulpfec_payload_type) {
+      LOG(LS_WARNING) << "Conflict merging ulpfec_payload_type configs: "
+                      << output->ulpfec_payload_type << " and "
+                      << other.ulpfec_payload_type;
+    }
+    output->ulpfec_payload_type = other.ulpfec_payload_type;
+  }
+  if (other.red_payload_type != -1) {
+    if (output->red_payload_type != -1 &&
+        output->red_payload_type != other.red_payload_type) {
+      LOG(LS_WARNING) << "Conflict merging red_payload_type configs: "
+                      << output->red_payload_type << " and "
+                      << other.red_payload_type;
+    }
+    output->red_payload_type = other.red_payload_type;
+  }
+}
 }  // namespace
 
 // This constant is really an on/off, lower-level configurable NACK history
@@ -135,19 +139,8 @@
 static const size_t kMaxExternalVideoCodecs = 8;
 #endif
 
-struct VideoCodecPref {
-  int payload_type;
-  int width;
-  int height;
-  const char* name;
-  int rtx_payload_type;
-} kDefaultVideoCodecPref = {100, 640, 400, kVp8CodecName, 96};
-
 const char kH264CodecName[] = "H264";
 
-VideoCodecPref kRedPref = {116, -1, -1, kRedCodecName, -1};
-VideoCodecPref kUlpfecPref = {117, -1, -1, kUlpfecCodecName, -1};
-
 static bool FindFirstMatchingCodec(const std::vector<VideoCodec>& codecs,
                                    const VideoCodec& requested_codec,
                                    VideoCodec* matching_codec) {
@@ -160,59 +153,6 @@
   return false;
 }
 
-static void AddDefaultFeedbackParams(VideoCodec* codec) {
-  const FeedbackParam kFir(kRtcpFbParamCcm, kRtcpFbCcmParamFir);
-  codec->AddFeedbackParam(kFir);
-  const FeedbackParam kNack(kRtcpFbParamNack, kParamValueEmpty);
-  codec->AddFeedbackParam(kNack);
-  const FeedbackParam kPli(kRtcpFbParamNack, kRtcpFbNackParamPli);
-  codec->AddFeedbackParam(kPli);
-  const FeedbackParam kRemb(kRtcpFbParamRemb, kParamValueEmpty);
-  codec->AddFeedbackParam(kRemb);
-}
-
-static bool IsNackEnabled(const VideoCodec& codec) {
-  return codec.HasFeedbackParam(
-      FeedbackParam(kRtcpFbParamNack, kParamValueEmpty));
-}
-
-static bool IsRembEnabled(const VideoCodec& codec) {
-  return codec.HasFeedbackParam(
-      FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty));
-}
-
-static VideoCodec DefaultVideoCodec() {
-  VideoCodec default_codec(kDefaultVideoCodecPref.payload_type,
-                           kDefaultVideoCodecPref.name,
-                           kDefaultVideoCodecPref.width,
-                           kDefaultVideoCodecPref.height,
-                           kDefaultFramerate,
-                           0);
-  AddDefaultFeedbackParams(&default_codec);
-  return default_codec;
-}
-
-static VideoCodec DefaultRedCodec() {
-  return VideoCodec(kRedPref.payload_type, kRedPref.name, 0, 0, 0, 0);
-}
-
-static VideoCodec DefaultUlpfecCodec() {
-  return VideoCodec(kUlpfecPref.payload_type, kUlpfecPref.name, 0, 0, 0, 0);
-}
-
-static std::vector<VideoCodec> DefaultVideoCodecs() {
-  std::vector<VideoCodec> codecs;
-  codecs.push_back(DefaultVideoCodec());
-  codecs.push_back(DefaultRedCodec());
-  codecs.push_back(DefaultUlpfecCodec());
-  if (kDefaultVideoCodecPref.rtx_payload_type != -1) {
-    codecs.push_back(
-        VideoCodec::CreateRtxCodec(kDefaultVideoCodecPref.rtx_payload_type,
-                                   kDefaultVideoCodecPref.payload_type));
-  }
-  return codecs;
-}
-
 static bool ValidateRtpHeaderExtensionIds(
     const std::vector<RtpHeaderExtension>& extensions) {
   std::set<int> extensions_used;
@@ -258,7 +198,7 @@
   stream.width = codec.width;
   stream.height = codec.height;
   stream.max_framerate =
-      codec.framerate != 0 ? codec.framerate : kDefaultFramerate;
+      codec.framerate != 0 ? codec.framerate : kDefaultVideoMaxFramerate;
 
   int min_bitrate = kMinVideoBitrate;
   codec.GetParam(kCodecParamMinBitrate, &min_bitrate);
@@ -350,9 +290,9 @@
 WebRtcVideoEngine2::WebRtcVideoEngine2()
     : worker_thread_(NULL),
       voice_engine_(NULL),
-      default_codec_format_(kDefaultVideoCodecPref.width,
-                            kDefaultVideoCodecPref.height,
-                            FPS_TO_INTERVAL(kDefaultFramerate),
+      default_codec_format_(kDefaultVideoMaxWidth,
+                            kDefaultVideoMaxHeight,
+                            FPS_TO_INTERVAL(kDefaultVideoMaxFramerate),
                             FOURCC_ANY),
       initialized_(false),
       cpu_monitor_(new rtc::CpuMonitor(NULL)),
@@ -427,10 +367,6 @@
   return true;
 }
 
-VideoEncoderConfig WebRtcVideoEngine2::GetDefaultEncoderConfig() const {
-  return VideoEncoderConfig(DefaultVideoCodec());
-}
-
 WebRtcVideoChannel2* WebRtcVideoEngine2::CreateChannel(
     const VideoOptions& options,
     VoiceMediaChannel* voice_channel) {
@@ -575,7 +511,7 @@
 }
 
 std::vector<VideoCodec> WebRtcVideoEngine2::GetSupportedCodecs() const {
-  std::vector<VideoCodec> supported_codecs = DefaultVideoCodecs();
+  std::vector<VideoCodec> supported_codecs = DefaultVideoCodecList();
 
   if (external_encoder_factory_ == NULL) {
     return supported_codecs;
@@ -900,9 +836,16 @@
     return false;
   }
 
-  send_codec_.Set(supported_codecs.front());
   LOG(LS_INFO) << "Using codec: " << supported_codecs.front().codec.ToString();
 
+  VideoCodecSettings old_codec;
+  if (send_codec_.Get(&old_codec) && supported_codecs.front() == old_codec) {
+    // Using same codec, avoid reconfiguring.
+    return true;
+  }
+
+  send_codec_.Set(supported_codecs.front());
+
   rtc::CritScope stream_lock(&stream_crit_);
   for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
            send_streams_.begin();
@@ -1101,19 +1044,19 @@
   }
 
   for (size_t i = 0; i < recv_codecs_.size(); ++i) {
-    if (recv_codecs_[i].codec.id == kDefaultVideoCodecPref.payload_type) {
-      config->rtp.fec = recv_codecs_[i].fec;
-      uint32 rtx_ssrc;
-      if (recv_codecs_[i].rtx_payload_type != -1 &&
-          sp.GetFidSsrc(ssrc, &rtx_ssrc)) {
-        config->rtp.rtx[kDefaultVideoCodecPref.payload_type].ssrc = rtx_ssrc;
-        config->rtp.rtx[kDefaultVideoCodecPref.payload_type].payload_type =
-            recv_codecs_[i].rtx_payload_type;
-      }
-      break;
-    }
+    MergeFecConfig(recv_codecs_[i].fec, &config->rtp.fec);
   }
 
+  for (size_t i = 0; i < recv_codecs_.size(); ++i) {
+    uint32 rtx_ssrc;
+    if (recv_codecs_[i].rtx_payload_type != -1 &&
+        sp.GetFidSsrc(ssrc, &rtx_ssrc)) {
+      webrtc::VideoReceiveStream::Config::Rtp::Rtx& rtx =
+          config->rtp.rtx[recv_codecs_[i].codec.id];
+      rtx.ssrc = rtx_ssrc;
+      rtx.payload_type = recv_codecs_[i].rtx_payload_type;
+    }
+  }
 }
 
 bool WebRtcVideoChannel2::RemoveRecvStream(uint32 ssrc) {
@@ -1202,7 +1145,21 @@
 
 void WebRtcVideoChannel2::FillBandwidthEstimationStats(
     VideoMediaInfo* video_media_info) {
-  // TODO(pbos): Implement.
+  BandwidthEstimationInfo bwe_info;
+  webrtc::Call::Stats stats = call_->GetStats();
+  bwe_info.available_send_bandwidth = stats.send_bandwidth_bps;
+  bwe_info.available_recv_bandwidth = stats.recv_bandwidth_bps;
+  bwe_info.bucket_delay = stats.pacer_delay_ms;
+
+  // Get send stream bitrate stats.
+  rtc::CritScope stream_lock(&stream_crit_);
+  for (std::map<uint32, WebRtcVideoSendStream*>::iterator stream =
+           send_streams_.begin();
+       stream != send_streams_.end();
+       ++stream) {
+    stream->second->FillBandwidthEstimationInfo(&bwe_info);
+  }
+  video_media_info->bw_estimations.push_back(bwe_info);
 }
 
 bool WebRtcVideoChannel2::SetCapturer(uint32 ssrc, VideoCapturer* capturer) {
@@ -1322,6 +1279,7 @@
     return false;
 
   send_rtp_extensions_ = FilterRtpExtensions(extensions);
+
   rtc::CritScope stream_lock(&stream_crit_);
   for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
            send_streams_.begin();
@@ -1339,8 +1297,13 @@
 }
 
 bool WebRtcVideoChannel2::SetOptions(const VideoOptions& options) {
-  LOG(LS_VERBOSE) << "SetOptions: " << options.ToString();
+  LOG(LS_INFO) << "SetOptions: " << options.ToString();
+  VideoOptions old_options = options_;
   options_.SetAll(options);
+  if (options_ == old_options) {
+    // No new options to set.
+    return true;
+  }
   rtc::CritScope stream_lock(&stream_crit_);
   for (std::map<uint32, WebRtcVideoSendStream*>::iterator it =
            send_streams_.begin();
@@ -1552,13 +1515,16 @@
         LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
         webrtc::I420VideoFrame black_frame;
 
+        // TODO(pbos): Base width/height on last_dimensions_. This will however
+        // fail the test AddRemoveCapturer which needs to be fixed to permit
+        // sending black frames in the same size that was previously sent.
         int width = format_.width;
         int height = format_.height;
         int half_width = (width + 1) / 2;
         black_frame.CreateEmptyFrame(
             width, height, width, half_width, half_width);
         SetWebRtcFrameToBlack(&black_frame);
-        SetDimensions(width, height, false);
+        SetDimensions(width, height, last_dimensions_.is_screencast);
         stream_->Input()->SwapFrame(&black_frame);
       }
 
@@ -1685,13 +1651,17 @@
 void WebRtcVideoChannel2::WebRtcVideoSendStream::SetCodecAndOptions(
     const VideoCodecSettings& codec_settings,
     const VideoOptions& options) {
-  std::vector<webrtc::VideoStream> video_streams =
-      encoder_factory_->CreateVideoStreams(
-          codec_settings.codec, options, parameters_.config.rtp.ssrcs.size());
-  if (video_streams.empty()) {
+  if (last_dimensions_.width == -1) {
+    last_dimensions_.width = codec_settings.codec.width;
+    last_dimensions_.height = codec_settings.codec.height;
+    last_dimensions_.is_screencast = false;
+  }
+  parameters_.encoder_config =
+      CreateVideoEncoderConfig(last_dimensions_, codec_settings.codec);
+  if (parameters_.encoder_config.streams.empty()) {
     return;
   }
-  parameters_.encoder_config.streams = video_streams;
+
   format_ = VideoFormat(codec_settings.codec.width,
                         codec_settings.codec.height,
                         VideoFormat::FpsToInterval(30),
@@ -1735,39 +1705,12 @@
   RecreateWebRtcStream();
 }
 
-void WebRtcVideoChannel2::WebRtcVideoSendStream::SetDimensions(
-    int width,
-    int height,
-    bool is_screencast) {
-  if (last_dimensions_.width == width && last_dimensions_.height == height &&
-      last_dimensions_.is_screencast == is_screencast) {
-    // Configured using the same parameters, do not reconfigure.
-    return;
-  }
-
-  last_dimensions_.width = width;
-  last_dimensions_.height = height;
-  last_dimensions_.is_screencast = is_screencast;
-
-  assert(!parameters_.encoder_config.streams.empty());
-  LOG(LS_VERBOSE) << "SetDimensions: " << width << "x" << height;
-
-  VideoCodecSettings codec_settings;
-  parameters_.codec_settings.Get(&codec_settings);
-  // Restrict dimensions according to codec max.
-  if (!is_screencast) {
-    if (codec_settings.codec.width < width)
-      width = codec_settings.codec.width;
-    if (codec_settings.codec.height < height)
-      height = codec_settings.codec.height;
-  }
-
-  webrtc::VideoEncoderConfig encoder_config = parameters_.encoder_config;
-  encoder_config.encoder_specific_settings =
-      encoder_factory_->CreateVideoEncoderSettings(codec_settings.codec,
-                                                   parameters_.options);
-
-  if (is_screencast) {
+webrtc::VideoEncoderConfig
+WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoderConfig(
+    const Dimensions& dimensions,
+    const VideoCodec& codec) const {
+  webrtc::VideoEncoderConfig encoder_config;
+  if (dimensions.is_screencast) {
     int screencast_min_bitrate_kbps;
     parameters_.options.screencast_min_bitrate.Get(
         &screencast_min_bitrate_kbps);
@@ -1779,20 +1722,60 @@
     encoder_config.content_type = webrtc::VideoEncoderConfig::kRealtimeVideo;
   }
 
-  VideoCodec codec = codec_settings.codec;
-  codec.width = width;
-  codec.height = height;
+  // Restrict dimensions according to codec max.
+  int width = dimensions.width;
+  int height = dimensions.height;
+  if (!dimensions.is_screencast) {
+    if (codec.width < width)
+      width = codec.width;
+    if (codec.height < height)
+      height = codec.height;
+  }
+
+  VideoCodec clamped_codec = codec;
+  clamped_codec.width = width;
+  clamped_codec.height = height;
 
   encoder_config.streams = encoder_factory_->CreateVideoStreams(
-      codec, parameters_.options, parameters_.config.rtp.ssrcs.size());
+      clamped_codec, parameters_.options, parameters_.config.rtp.ssrcs.size());
 
   // Conference mode screencast uses 2 temporal layers split at 100kbit.
   if (parameters_.options.conference_mode.GetWithDefaultIfUnset(false) &&
-      is_screencast && encoder_config.streams.size() == 1) {
+      dimensions.is_screencast && encoder_config.streams.size() == 1) {
     encoder_config.streams[0].temporal_layer_thresholds_bps.clear();
     encoder_config.streams[0].temporal_layer_thresholds_bps.push_back(
         kConferenceModeTemporalLayerBitrateBps);
   }
+  return encoder_config;
+}
+
+void WebRtcVideoChannel2::WebRtcVideoSendStream::SetDimensions(
+    int width,
+    int height,
+    bool is_screencast) {
+  if (last_dimensions_.width == width && last_dimensions_.height == height &&
+      last_dimensions_.is_screencast == is_screencast) {
+    // Configured using the same parameters, do not reconfigure.
+    return;
+  }
+  LOG(LS_INFO) << "SetDimensions: " << width << "x" << height
+               << (is_screencast ? " (screencast)" : " (not screencast)");
+
+  last_dimensions_.width = width;
+  last_dimensions_.height = height;
+  last_dimensions_.is_screencast = is_screencast;
+
+  assert(!parameters_.encoder_config.streams.empty());
+
+  VideoCodecSettings codec_settings;
+  parameters_.codec_settings.Get(&codec_settings);
+
+  webrtc::VideoEncoderConfig encoder_config =
+      CreateVideoEncoderConfig(last_dimensions_, codec_settings.codec);
+
+  encoder_config.encoder_specific_settings =
+      encoder_factory_->CreateVideoEncoderSettings(codec_settings.codec,
+                                                   parameters_.options);
 
   bool stream_reconfigured = stream_->ReconfigureVideoEncoder(encoder_config);
 
@@ -1842,12 +1825,12 @@
   info.framerate_input = stats.input_frame_rate;
   info.framerate_sent = stats.encode_frame_rate;
 
-  for (std::map<uint32_t, webrtc::StreamStats>::iterator it =
+  for (std::map<uint32_t, webrtc::SsrcStats>::iterator it =
            stats.substreams.begin();
        it != stats.substreams.end();
        ++it) {
     // TODO(pbos): Wire up additional stats, such as padding bytes.
-    webrtc::StreamStats stream_stats = it->second;
+    webrtc::SsrcStats stream_stats = it->second;
     info.bytes_sent += stream_stats.rtp_stats.bytes +
                        stream_stats.rtp_stats.header_bytes +
                        stream_stats.rtp_stats.padding_bytes;
@@ -1857,7 +1840,7 @@
 
   if (!stats.substreams.empty()) {
     // TODO(pbos): Report fraction lost per SSRC.
-    webrtc::StreamStats first_stream_stats = stats.substreams.begin()->second;
+    webrtc::SsrcStats first_stream_stats = stats.substreams.begin()->second;
     info.fraction_lost =
         static_cast<float>(first_stream_stats.rtcp_stats.fraction_lost) /
         (1 << 8);
@@ -1884,6 +1867,23 @@
   return info;
 }
 
+void WebRtcVideoChannel2::WebRtcVideoSendStream::FillBandwidthEstimationInfo(
+    BandwidthEstimationInfo* bwe_info) {
+  rtc::CritScope cs(&lock_);
+  if (stream_ == NULL) {
+    return;
+  }
+  webrtc::VideoSendStream::Stats stats = stream_->GetStats();
+  for (std::map<uint32_t, webrtc::SsrcStats>::iterator it =
+           stats.substreams.begin();
+       it != stats.substreams.end();
+       ++it) {
+    bwe_info->transmit_bitrate += it->second.total_bitrate_bps;
+    bwe_info->retransmit_bitrate += it->second.retransmit_bitrate_bps;
+  }
+  bwe_info->actual_enc_bitrate = stats.media_bitrate_bps;
+}
+
 void WebRtcVideoChannel2::WebRtcVideoSendStream::OnCpuResolutionRequest(
     CoordinatedVideoAdapter::AdaptRequest adapt_request) {
   rtc::CritScope cs(&lock_);
@@ -2106,6 +2106,14 @@
 WebRtcVideoChannel2::VideoCodecSettings::VideoCodecSettings()
     : rtx_payload_type(-1) {}
 
+bool WebRtcVideoChannel2::VideoCodecSettings::operator==(
+    const WebRtcVideoChannel2::VideoCodecSettings& other) const {
+  return codec == other.codec &&
+         fec.ulpfec_payload_type == other.fec.ulpfec_payload_type &&
+         fec.red_payload_type == other.fec.red_payload_type &&
+         rtx_payload_type == other.rtx_payload_type;
+}
+
 std::vector<WebRtcVideoChannel2::VideoCodecSettings>
 WebRtcVideoChannel2::MapCodecs(const std::vector<VideoCodec>& codecs) {
   assert(!codecs.empty());
diff --git a/media/webrtc/webrtcvideoengine2.h b/media/webrtc/webrtcvideoengine2.h
index 0b812ef..9a5fe65 100644
--- a/media/webrtc/webrtcvideoengine2.h
+++ b/media/webrtc/webrtcvideoengine2.h
@@ -145,7 +145,6 @@
 
   int GetCapabilities();
   bool SetDefaultEncoderConfig(const VideoEncoderConfig& config);
-  VideoEncoderConfig GetDefaultEncoderConfig() const;
 
   WebRtcVideoChannel2* CreateChannel(const VideoOptions& options,
                                      VoiceMediaChannel* voice_channel);
@@ -281,6 +280,8 @@
   struct VideoCodecSettings {
     VideoCodecSettings();
 
+    bool operator ==(const VideoCodecSettings& other) const;
+
     VideoCodec codec;
     webrtc::FecConfig fec;
     int rtx_payload_type;
@@ -315,6 +316,7 @@
     void Stop();
 
     VideoSenderInfo GetVideoSenderInfo();
+    void FillBandwidthEstimationInfo(BandwidthEstimationInfo* bwe_info);
 
     void OnCpuResolutionRequest(
         CoordinatedVideoAdapter::AdaptRequest adapt_request);
@@ -348,8 +350,8 @@
       bool external;
     };
 
-    struct LastDimensions {
-      LastDimensions() : width(-1), height(-1), is_screencast(false) {}
+    struct Dimensions {
+      Dimensions() : width(-1), height(-1), is_screencast(false) {}
       int width;
       int height;
       bool is_screencast;
@@ -357,23 +359,28 @@
 
     AllocatedEncoder CreateVideoEncoder(const VideoCodec& codec)
         EXCLUSIVE_LOCKS_REQUIRED(lock_);
-    void DestroyVideoEncoder(AllocatedEncoder* encoder);
+    void DestroyVideoEncoder(AllocatedEncoder* encoder)
+        EXCLUSIVE_LOCKS_REQUIRED(lock_);
     void SetCodecAndOptions(const VideoCodecSettings& codec,
                             const VideoOptions& options)
         EXCLUSIVE_LOCKS_REQUIRED(lock_);
     void RecreateWebRtcStream() EXCLUSIVE_LOCKS_REQUIRED(lock_);
+    webrtc::VideoEncoderConfig CreateVideoEncoderConfig(
+        const Dimensions& dimensions,
+        const VideoCodec& codec) const EXCLUSIVE_LOCKS_REQUIRED(lock_);
     void SetDimensions(int width, int height, bool is_screencast)
         EXCLUSIVE_LOCKS_REQUIRED(lock_);
 
     webrtc::Call* const call_;
-    WebRtcVideoEncoderFactory* const external_encoder_factory_;
-    WebRtcVideoEncoderFactory2* const encoder_factory_;
+    WebRtcVideoEncoderFactory* const external_encoder_factory_
+        GUARDED_BY(lock_);
+    WebRtcVideoEncoderFactory2* const encoder_factory_ GUARDED_BY(lock_);
 
     rtc::CriticalSection lock_;
     webrtc::VideoSendStream* stream_ GUARDED_BY(lock_);
     VideoSendStreamParameters parameters_ GUARDED_BY(lock_);
     AllocatedEncoder allocated_encoder_ GUARDED_BY(lock_);
-    LastDimensions last_dimensions_ GUARDED_BY(lock_);
+    Dimensions last_dimensions_ GUARDED_BY(lock_);
 
     VideoCapturer* capturer_ GUARDED_BY(lock_);
     bool sending_ GUARDED_BY(lock_);
diff --git a/media/webrtc/webrtcvideoengine2_unittest.cc b/media/webrtc/webrtcvideoengine2_unittest.cc
index 0b85723..afea370 100644
--- a/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -294,12 +294,9 @@
   return NULL;
 }
 
-uint32_t FakeCall::SendBitrateEstimate() {
-  return 0;
-}
-
-uint32_t FakeCall::ReceiveBitrateEstimate() {
-  return 0;
+webrtc::Call::Stats FakeCall::GetStats() const {
+  webrtc::Call::Stats stats;
+  return stats;
 }
 
 void FakeCall::SignalNetworkState(webrtc::Call::NetworkState state) {
diff --git a/media/webrtc/webrtcvideoengine2_unittest.h b/media/webrtc/webrtcvideoengine2_unittest.h
index 3b62289..48c4f64 100644
--- a/media/webrtc/webrtcvideoengine2_unittest.h
+++ b/media/webrtc/webrtcvideoengine2_unittest.h
@@ -127,8 +127,7 @@
       webrtc::VideoReceiveStream* receive_stream) OVERRIDE;
   virtual webrtc::PacketReceiver* Receiver() OVERRIDE;
 
-  virtual uint32_t SendBitrateEstimate() OVERRIDE;
-  virtual uint32_t ReceiveBitrateEstimate() OVERRIDE;
+  virtual webrtc::Call::Stats GetStats() const OVERRIDE;
 
   virtual void SignalNetworkState(webrtc::Call::NetworkState state) OVERRIDE;
 
diff --git a/media/webrtc/webrtcvoiceengine.cc b/media/webrtc/webrtcvoiceengine.cc
index 95e16e4..74b3163 100644
--- a/media/webrtc/webrtcvoiceengine.cc
+++ b/media/webrtc/webrtcvoiceengine.cc
@@ -74,7 +74,7 @@
   { "ISAC",   32000,  1, 104, true },
   { "CELT",   32000,  1, 109, true },
   { "CELT",   32000,  2, 110, true },
-  { "G722",   16000,  1, 9,   false },
+  { "G722",   8000,   1, 9,   false },
   { "ILBC",   8000,   1, 102, false },
   { "PCMU",   8000,   1, 0,   false },
   { "PCMA",   8000,   1, 8,   false },
@@ -110,6 +110,7 @@
 
 static const char kIsacCodecName[] = "ISAC";
 static const char kL16CodecName[] = "L16";
+static const char kG722CodecName[] = "G722";
 
 // Parameter used for NACK.
 // This value is equivalent to 5 seconds of audio data at 20 ms per packet.
@@ -485,12 +486,24 @@
   voe_codec->rate = GetOpusBitrate(codec, *max_playback_rate);
 }
 
+// Changes RTP timestamp rate of G722. This is due to the "bug" in the RFC
+// which says that G722 should be advertised as 8 kHz although it is a 16 kHz
+// codec.
+static void MaybeFixupG722(webrtc::CodecInst* voe_codec, int new_plfreq) {
+  if (_stricmp(voe_codec->plname, kG722CodecName) == 0) {
+    // If the ASSERT triggers, the codec definition in WebRTC VoiceEngine
+    // has changed, and this special case is no longer needed.
+    ASSERT(voe_codec->plfreq != new_plfreq);
+    voe_codec->plfreq = new_plfreq;
+  }
+}
+
 void WebRtcVoiceEngine::ConstructCodecs() {
   LOG(LS_INFO) << "WebRtc VoiceEngine codecs:";
   int ncodecs = voe_wrapper_->codec()->NumOfCodecs();
   for (int i = 0; i < ncodecs; ++i) {
     webrtc::CodecInst voe_codec;
-    if (voe_wrapper_->codec()->GetCodec(i, voe_codec) != -1) {
+    if (GetVoeCodec(i, voe_codec)) {
       // Skip uncompressed formats.
       if (_stricmp(voe_codec.plname, kL16CodecName) == 0) {
         continue;
@@ -540,6 +553,15 @@
   std::sort(codecs_.begin(), codecs_.end(), &AudioCodec::Preferable);
 }
 
+bool WebRtcVoiceEngine::GetVoeCodec(int index, webrtc::CodecInst& codec) {
+  if (voe_wrapper_->codec()->GetCodec(index, codec) != -1) {
+    // Change the sample rate of G722 to 8000 to match SDP.
+    MaybeFixupG722(&codec, 8000);
+    return true;
+  }
+  return false;
+}
+
 WebRtcVoiceEngine::~WebRtcVoiceEngine() {
   LOG(LS_VERBOSE) << "WebRtcVoiceEngine::~WebRtcVoiceEngine";
   if (voe_wrapper_->base()->DeRegisterVoiceEngineObserver() == -1) {
@@ -1224,7 +1246,7 @@
   int ncodecs = voe_wrapper_->codec()->NumOfCodecs();
   for (int i = 0; i < ncodecs; ++i) {
     webrtc::CodecInst voe_codec;
-    if (voe_wrapper_->codec()->GetCodec(i, voe_codec) != -1) {
+    if (GetVoeCodec(i, voe_codec)) {
       AudioCodec codec(voe_codec.pltype, voe_codec.plname, voe_codec.plfreq,
                        voe_codec.rate, voe_codec.channels, 0);
       bool multi_rate = IsCodecMultiRate(voe_codec);
@@ -1243,6 +1265,9 @@
             voe_codec.rate = in.bitrate;
           }
 
+          // Reset G722 sample rate to 16000 to match WebRTC.
+          MaybeFixupG722(&voe_codec, 16000);
+
           // Apply codec-specific settings.
           if (IsIsac(codec)) {
             // If ISAC and an explicit bitrate is not specified,
diff --git a/media/webrtc/webrtcvoiceengine.h b/media/webrtc/webrtcvoiceengine.h
index f19059b..34b9f3c 100644
--- a/media/webrtc/webrtcvoiceengine.h
+++ b/media/webrtc/webrtcvoiceengine.h
@@ -199,6 +199,7 @@
 
   void Construct();
   void ConstructCodecs();
+  bool GetVoeCodec(int index, webrtc::CodecInst& codec);
   bool InitInternal();
   bool EnsureSoundclipEngineInit();
   void SetTraceFilter(int filter);
diff --git a/media/webrtc/webrtcvoiceengine_unittest.cc b/media/webrtc/webrtcvoiceengine_unittest.cc
index 5deabd2..5eb6e24 100644
--- a/media/webrtc/webrtcvoiceengine_unittest.cc
+++ b/media/webrtc/webrtcvoiceengine_unittest.cc
@@ -52,14 +52,16 @@
 static const cricket::AudioCodec kIsacCodec(103, "ISAC", 16000, 32000, 1, 0);
 static const cricket::AudioCodec kCeltCodec(110, "CELT", 32000, 64000, 2, 0);
 static const cricket::AudioCodec kOpusCodec(111, "opus", 48000, 64000, 2, 0);
+static const cricket::AudioCodec kG722CodecVoE(9, "G722", 16000, 64000, 1, 0);
+static const cricket::AudioCodec kG722CodecSdp(9, "G722", 8000, 64000, 1, 0);
 static const cricket::AudioCodec kRedCodec(117, "red", 8000, 0, 1, 0);
 static const cricket::AudioCodec kCn8000Codec(13, "CN", 8000, 0, 1, 0);
 static const cricket::AudioCodec kCn16000Codec(105, "CN", 16000, 0, 1, 0);
 static const cricket::AudioCodec
     kTelephoneEventCodec(106, "telephone-event", 8000, 0, 1, 0);
 static const cricket::AudioCodec* const kAudioCodecs[] = {
-    &kPcmuCodec, &kIsacCodec, &kCeltCodec, &kOpusCodec, &kRedCodec,
-    &kCn8000Codec, &kCn16000Codec, &kTelephoneEventCodec,
+    &kPcmuCodec, &kIsacCodec, &kCeltCodec, &kOpusCodec, &kG722CodecVoE,
+    &kRedCodec, &kCn8000Codec, &kCn16000Codec, &kTelephoneEventCodec,
 };
 const char kRingbackTone[] = "RIFF____WAVE____ABCD1234";
 static uint32 kSsrc1 = 0x99;
@@ -770,6 +772,20 @@
   EXPECT_EQ(1, voe_.GetNumSetSendCodecs());
 }
 
+// Verify that G722 is set with 16000 samples per second to WebRTC.
+TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecG722) {
+  EXPECT_TRUE(SetupEngine());
+  int channel_num = voe_.GetLastChannel();
+  std::vector<cricket::AudioCodec> codecs;
+  codecs.push_back(kG722CodecSdp);
+  EXPECT_TRUE(channel_->SetSendCodecs(codecs));
+  webrtc::CodecInst gcodec;
+  EXPECT_EQ(0, voe_.GetSendCodec(channel_num, gcodec));
+  EXPECT_STREQ("G722", gcodec.plname);
+  EXPECT_EQ(1, gcodec.channels);
+  EXPECT_EQ(16000, gcodec.plfreq);
+}
+
 // Test that if clockrate is not 48000 for opus, we fail.
 TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecOpusBadClockrate) {
   EXPECT_TRUE(SetupEngine());
@@ -3208,7 +3224,7 @@
   EXPECT_TRUE(engine.FindCodec(
       cricket::AudioCodec(96, "PCMA", 8000, 0, 1, 0)));
   EXPECT_TRUE(engine.FindCodec(
-      cricket::AudioCodec(96, "G722", 16000, 0, 1, 0)));
+      cricket::AudioCodec(96, "G722", 8000, 0, 1, 0)));
   EXPECT_TRUE(engine.FindCodec(
       cricket::AudioCodec(96, "red", 8000, 0, 1, 0)));
   EXPECT_TRUE(engine.FindCodec(
@@ -3225,7 +3241,7 @@
   EXPECT_TRUE(engine.FindCodec(
       cricket::AudioCodec(8, "", 8000, 0, 1, 0)));   // PCMA
   EXPECT_TRUE(engine.FindCodec(
-      cricket::AudioCodec(9, "", 16000, 0, 1, 0)));  // G722
+      cricket::AudioCodec(9, "", 8000, 0, 1, 0)));  // G722
   EXPECT_TRUE(engine.FindCodec(
       cricket::AudioCodec(13, "", 8000, 0, 1, 0)));  // CN
   // Check sample/bitrate matching.
@@ -3248,7 +3264,7 @@
       EXPECT_EQ(103, it->id);
     } else if (it->name == "ISAC" && it->clockrate == 32000) {
       EXPECT_EQ(104, it->id);
-    } else if (it->name == "G722" && it->clockrate == 16000) {
+    } else if (it->name == "G722" && it->clockrate == 8000) {
       EXPECT_EQ(9, it->id);
     } else if (it->name == "telephone-event") {
       EXPECT_EQ(126, it->id);
diff --git a/session/media/mediasessionclient_unittest.cc b/session/media/mediasessionclient_unittest.cc
index 3e8a90f..eb052ef 100644
--- a/session/media/mediasessionclient_unittest.cc
+++ b/session/media/mediasessionclient_unittest.cc
@@ -61,7 +61,7 @@
   cricket::AudioCodec(119, "ISACLC", 16000, 40000, 1, 16),
   cricket::AudioCodec(99,  "speex",  16000, 22000, 1, 15),
   cricket::AudioCodec(97,  "IPCMWB", 16000, 80000, 1, 14),
-  cricket::AudioCodec(9,   "G722",   16000, 64000, 1, 13),
+  cricket::AudioCodec(9,   "G722",   8000,  64000, 1, 13),
   cricket::AudioCodec(102, "iLBC",   8000,  13300, 1, 12),
   cricket::AudioCodec(98,  "speex",  8000,  11000, 1, 11),
   cricket::AudioCodec(3,   "GSM",    8000,  13000, 1, 10),
@@ -81,7 +81,7 @@
 static const cricket::AudioCodec kAudioCodecsDifferentPreference[] = {
   cricket::AudioCodec(104, "ISAC",   32000, -1,    1, 17),
   cricket::AudioCodec(97,  "IPCMWB", 16000, 80000, 1, 14),
-  cricket::AudioCodec(9,   "G722",   16000, 64000, 1, 13),
+  cricket::AudioCodec(9,   "G722",   8000,  64000, 1, 13),
   cricket::AudioCodec(119, "ISACLC", 16000, 40000, 1, 16),
   cricket::AudioCodec(103, "ISAC",   16000, -1,    1, 18),
   cricket::AudioCodec(99,  "speex",  16000, 22000, 1, 15),
@@ -197,7 +197,7 @@
      "      <payload-type xmlns='http://www.google.com/session/phone' " \
      "        id='97' name='IPCMWB' clockrate='16000' bitrate='80000' />   " \
      "      <payload-type xmlns='http://www.google.com/session/phone' " \
-     "        id='9' name='G722' clockrate='16000' bitrate='64000' /> " \
+     "        id='9' name='G722' clockrate='8000' bitrate='64000' /> " \
      "      <payload-type xmlns='http://www.google.com/session/phone' " \
      "        id='102' name='iLBC' clockrate='8000' bitrate='13300' />" \
      "      <payload-type xmlns='http://www.google.com/session/phone' " \
@@ -248,7 +248,7 @@
      "          <parameter name='bitrate' value='80000'/>               " \
      "        </payload-type>                                           " \
      "        <payload-type                                             " \
-     "          id='9' name='G722' clockrate='16000'>                   " \
+     "          id='9' name='G722' clockrate='8000'>                   " \
      "          <parameter name='bitrate' value='64000'/>               " \
      "        </payload-type>                                           " \
      "        <payload-type                                             " \
@@ -1918,7 +1918,7 @@
      e = NextFromPayloadType(e);
     ASSERT_TRUE(e != NULL);
     codec = AudioCodecFromPayloadType(e);
-    VerifyAudioCodec(codec, 9, "G722", 16000, 64000, 1);
+    VerifyAudioCodec(codec, 9, "G722", 8000, 64000, 1);
 
     e = NextFromPayloadType(e);
     ASSERT_TRUE(e != NULL);
@@ -2112,7 +2112,7 @@
     codec = AudioCodecFromPayloadType(e);
     ASSERT_EQ(9, codec.id);
     ASSERT_EQ("G722", codec.name);
-    ASSERT_EQ(16000, codec.clockrate);
+    ASSERT_EQ(8000, codec.clockrate);
     ASSERT_EQ(64000, codec.bitrate);
     ASSERT_EQ(1, codec.channels);