Add new view that renders local video using AVCaptureVideoPreviewLayer.

BUG=

Review URL: https://codereview.webrtc.org/1497393002

Cr-Commit-Position: refs/heads/master@{#10940}
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 60323c8..09cf050 100755
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -405,7 +405,6 @@
       input_api, output_api))
   results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
       input_api, output_api))
-  results.extend(_CheckApprovedFilesLintClean(input_api, output_api))
   results.extend(_CheckNativeApiHeaderChanges(input_api, output_api))
   results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
   results.extend(_CheckNoFRIEND_TEST(input_api, output_api))
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
index e1b0f88..0f9dc68 100644
--- a/talk/app/webrtc/objc/avfoundationvideocapturer.mm
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
@@ -33,6 +33,8 @@
 #import <Foundation/Foundation.h>
 #import <UIKit/UIKit.h>
 
+#import "webrtc/base/objc/RTCDispatcher.h"
+
 // TODO(tkchin): support other formats.
 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
 static cricket::VideoFormat const kDefaultFormat =
@@ -41,11 +43,6 @@
                          cricket::VideoFormat::FpsToInterval(30),
                          cricket::FOURCC_NV12);
 
-// This queue is used to start and stop the capturer without blocking the
-// calling thread. -[AVCaptureSession startRunning] blocks until the camera is
-// running.
-static dispatch_queue_t kBackgroundQueue = nil;
-
 // This class used to capture frames using AVFoundation APIs on iOS. It is meant
 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this
 // because other webrtc objects own cricket::VideoCapturer, which is not
@@ -80,15 +77,6 @@
 @synthesize useBackCamera = _useBackCamera;
 @synthesize isRunning = _isRunning;
 
-+ (void)initialize {
-  static dispatch_once_t onceToken;
-  dispatch_once(&onceToken, ^{
-    kBackgroundQueue = dispatch_queue_create(
-        "com.google.webrtc.RTCAVFoundationCapturerBackground",
-        DISPATCH_QUEUE_SERIAL);
-  });
-}
-
 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
   NSParameterAssert(capturer);
   if (self = [super init]) {
@@ -132,9 +120,10 @@
   _orientationHasChanged = NO;
   [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
   AVCaptureSession* session = _captureSession;
-  dispatch_async(kBackgroundQueue, ^{
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
     [session startRunning];
-  });
+  }];
   _isRunning = YES;
 }
 
@@ -144,9 +133,10 @@
   }
   [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
   AVCaptureSession* session = _captureSession;
-  dispatch_async(kBackgroundQueue, ^{
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
     [session stopRunning];
-  });
+  }];
   [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
   _isRunning = NO;
 }
diff --git a/talk/libjingle.gyp b/talk/libjingle.gyp
index c9ef58d..ceff633 100755
--- a/talk/libjingle.gyp
+++ b/talk/libjingle.gyp
@@ -368,6 +368,9 @@
                 'app/webrtc/objc/public/RTCEAGLVideoView.h',
                 'app/webrtc/objc/public/RTCAVFoundationVideoSource.h',
               ],
+              'dependencies': [
+                '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+              ],
               'link_settings': {
                 'xcode_settings': {
                   'OTHER_LDFLAGS': [
diff --git a/webrtc/base/BUILD.gn b/webrtc/base/BUILD.gn
index 4f42695..c7c953d 100644
--- a/webrtc/base/BUILD.gn
+++ b/webrtc/base/BUILD.gn
@@ -616,6 +616,10 @@
     public_configs = [ "..:common_inherited_config" ]
 
     sources = [
+      "objc/RTCCameraPreviewView.h",
+      "objc/RTCCameraPreviewView.m",
+      "objc/RTCDispatcher.h",
+      "objc/RTCDispatcher.m",
       "objc/RTCLogging.h",
       "objc/RTCLogging.mm",
     ]
diff --git a/webrtc/base/base.gyp b/webrtc/base/base.gyp
index 23a3c26..9d10403 100644
--- a/webrtc/base/base.gyp
+++ b/webrtc/base/base.gyp
@@ -33,8 +33,12 @@
             'rtc_base',
           ],
           'sources': [
+            'objc/RTCCameraPreviewView.h',
+            'objc/RTCCameraPreviewView.m',
+            'objc/RTCDispatcher.h',
+            'objc/RTCDispatcher.m',
             'objc/RTCLogging.h',
-            'objc/RTCLogging.mm'
+            'objc/RTCLogging.mm',
           ],
           'xcode_settings': {
             'CLANG_ENABLE_OBJC_ARC': 'YES',
diff --git a/webrtc/base/objc/RTCCameraPreviewView.h b/webrtc/base/objc/RTCCameraPreviewView.h
new file mode 100644
index 0000000..03e94c2
--- /dev/null
+++ b/webrtc/base/objc/RTCCameraPreviewView.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+@class AVCaptureSession;
+@class RTCAVFoundationVideoSource;
+
+/** RTCCameraPreviewView is a view that renders local video from an
+ *  AVCaptureSession.
+ */
+@interface RTCCameraPreviewView : UIView
+
+/** The capture session being rendered in the view. Capture session
+ *  is assigned to AVCaptureVideoPreviewLayer asynchronously on the same
+ *  queue that the AVCaptureSession is started/stopped on.
+ */
+@property(nonatomic, strong) AVCaptureSession *captureSession;
+
+@end
diff --git a/webrtc/base/objc/RTCCameraPreviewView.m b/webrtc/base/objc/RTCCameraPreviewView.m
new file mode 100644
index 0000000..5a57483
--- /dev/null
+++ b/webrtc/base/objc/RTCCameraPreviewView.m
@@ -0,0 +1,47 @@
+/*
+ *  Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "webrtc/base/objc/RTCCameraPreviewView.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "webrtc/base/objc/RTCDispatcher.h"
+
+@implementation RTCCameraPreviewView
+
+@synthesize captureSession = _captureSession;
+
++ (Class)layerClass {
+  return [AVCaptureVideoPreviewLayer class];
+}
+
+- (void)setCaptureSession:(AVCaptureSession *)captureSession {
+  if (_captureSession == captureSession) {
+    return;
+  }
+  _captureSession = captureSession;
+  AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    previewLayer.session = captureSession;
+  }];
+}
+
+#pragma mark - Private
+
+- (AVCaptureVideoPreviewLayer *)previewLayer {
+  return (AVCaptureVideoPreviewLayer *)self.layer;
+}
+
+@end
diff --git a/webrtc/base/objc/RTCDispatcher.h b/webrtc/base/objc/RTCDispatcher.h
new file mode 100644
index 0000000..c32b93d
--- /dev/null
+++ b/webrtc/base/objc/RTCDispatcher.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
+  // Main dispatcher queue.
+  RTCDispatcherTypeMain,
+  // Used for starting/stopping AVCaptureSession, and assigning
+  // capture session to AVCaptureVideoPreviewLayer.
+  RTCDispatcherTypeCaptureSession,
+};
+
+/** Dispatcher that asynchronously dispatches blocks to a specific
+ *  shared dispatch queue.
+ */
+@interface RTCDispatcher : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Dispatch the block asynchronously on the queue for dispatchType.
+ *  @param dispatchType The queue type to dispatch on.
+ *  @param block The block to dispatch asynchronously.
+ */
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
+                      block:(dispatch_block_t)block;
+
+@end
diff --git a/webrtc/base/objc/RTCDispatcher.m b/webrtc/base/objc/RTCDispatcher.m
new file mode 100644
index 0000000..065705a
--- /dev/null
+++ b/webrtc/base/objc/RTCDispatcher.m
@@ -0,0 +1,46 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDispatcher.h"
+
+static dispatch_queue_t kCaptureSessionQueue = nil;
+
+@implementation RTCDispatcher {
+  dispatch_queue_t _captureSessionQueue;
+}
+
++ (void)initialize {
+  static dispatch_once_t onceToken;
+  dispatch_once(&onceToken, ^{
+    kCaptureSessionQueue = dispatch_queue_create(
+        "org.webrtc.RTCDispatcherCaptureSession",
+        DISPATCH_QUEUE_SERIAL);
+  });
+}
+
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
+                      block:(dispatch_block_t)block {
+  dispatch_queue_t queue = [self dispatchQueueForType:dispatchType];
+  dispatch_async(queue, block);
+}
+
+#pragma mark - Private
+
++ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
+  switch (dispatchType) {
+    case RTCDispatcherTypeMain:
+      return dispatch_get_main_queue();
+    case RTCDispatcherTypeCaptureSession:
+      return kCaptureSessionQueue;
+  }
+}
+
+@end
+
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
index 3c9e46e..e809cb3 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
@@ -21,15 +21,8 @@
 static CGFloat const kCallControlMargin = 8;
 static CGFloat const kAppLabelHeight = 20;
 
-@class ARDRoomTextField;
-@protocol ARDRoomTextFieldDelegate <NSObject>
-- (void)roomTextField:(ARDRoomTextField *)roomTextField
-         didInputRoom:(NSString *)room;
-@end
-
 // Helper view that contains a text field and a clear button.
 @interface ARDRoomTextField : UIView <UITextFieldDelegate>
-@property(nonatomic, weak) id<ARDRoomTextFieldDelegate> delegate;
 @property(nonatomic, readonly) NSString *roomText;
 @end
 
@@ -38,14 +31,14 @@
   UIButton *_clearButton;
 }
 
-@synthesize delegate = _delegate;
-
 - (instancetype)initWithFrame:(CGRect)frame {
   if (self = [super initWithFrame:frame]) {
     _roomText = [[UITextField alloc] initWithFrame:CGRectZero];
     _roomText.borderStyle = UITextBorderStyleNone;
     _roomText.font = [UIFont fontWithName:@"Roboto" size:12];
     _roomText.placeholder = @"Room name";
+    _roomText.autocorrectionType = UITextAutocorrectionTypeNo;
+    _roomText.autocapitalizationType = UITextAutocapitalizationTypeNone;
     _roomText.delegate = self;
     [_roomText addTarget:self
                   action:@selector(textFieldDidChange:)
@@ -96,10 +89,6 @@
 
 #pragma mark - UITextFieldDelegate
 
-- (void)textFieldDidEndEditing:(UITextField *)textField {
-  [_delegate roomTextField:self didInputRoom:textField.text];
-}
-
 - (BOOL)textFieldShouldReturn:(UITextField *)textField {
   // There is no other control that can take focus, so manually resign focus
   // when return (Join) is pressed to trigger |textFieldDidEndEditing|.
@@ -125,9 +114,6 @@
 
 @end
 
-@interface ARDMainView () <ARDRoomTextFieldDelegate>
-@end
-
 @implementation ARDMainView {
   UILabel *_appLabel;
   ARDRoomTextField *_roomText;
@@ -151,7 +137,6 @@
     [self addSubview:_appLabel];
 
     _roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
-    _roomText.delegate = self;
     [self addSubview:_roomText];
 
     UIFont *controlFont = [UIFont fontWithName:@"Roboto" size:20];
@@ -260,16 +245,6 @@
                                       _startCallButton.frame.size.height);
 }
 
-#pragma mark - ARDRoomTextFieldDelegate
-
-- (void)roomTextField:(ARDRoomTextField *)roomTextField
-         didInputRoom:(NSString *)room {
-  [_delegate mainView:self
-         didInputRoom:room
-           isLoopback:NO
-          isAudioOnly:_audioOnlySwitch.isOn];
-}
-
 #pragma mark - Private
 
 - (void)onStartCall:(id)sender {
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
index 209bcd4..378281d 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
@@ -10,6 +10,7 @@
 
 #import <UIKit/UIKit.h>
 
+#import "webrtc/base/objc/RTCCameraPreviewView.h"
 #import "RTCEAGLVideoView.h"
 
 #import "ARDStatsView.h"
@@ -33,7 +34,7 @@
 @interface ARDVideoCallView : UIView
 
 @property(nonatomic, readonly) UILabel *statusLabel;
-@property(nonatomic, readonly) RTCEAGLVideoView *localVideoView;
+@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView;
 @property(nonatomic, readonly) RTCEAGLVideoView *remoteVideoView;
 @property(nonatomic, readonly) ARDStatsView *statsView;
 @property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
index 4048b84..4c9c9d2 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.m
@@ -25,7 +25,6 @@
 @implementation ARDVideoCallView {
   UIButton *_cameraSwitchButton;
   UIButton *_hangupButton;
-  CGSize _localVideoSize;
   CGSize _remoteVideoSize;
   BOOL _useRearCamera;
 }
@@ -42,10 +41,7 @@
     _remoteVideoView.delegate = self;
     [self addSubview:_remoteVideoView];
 
-    // TODO(tkchin): replace this with a view that renders layer from
-    // AVCaptureSession.
-    _localVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
-    _localVideoView.delegate = self;
+    _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
     [self addSubview:_localVideoView];
 
     _statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@@ -114,22 +110,15 @@
     _remoteVideoView.frame = bounds;
   }
 
-  if (_localVideoSize.width && _localVideoSize.height > 0) {
-    // Aspect fit local video view into a square box.
-    CGRect localVideoFrame =
-        CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
-    localVideoFrame =
-        AVMakeRectWithAspectRatioInsideRect(_localVideoSize, localVideoFrame);
-
-    // Place the view in the bottom right.
-    localVideoFrame.origin.x = CGRectGetMaxX(bounds)
-        - localVideoFrame.size.width - kLocalVideoViewPadding;
-    localVideoFrame.origin.y = CGRectGetMaxY(bounds)
-        - localVideoFrame.size.height - kLocalVideoViewPadding;
-    _localVideoView.frame = localVideoFrame;
-  } else {
-    _localVideoView.frame = bounds;
-  }
+  // Aspect fit local video view into a square box.
+  CGRect localVideoFrame =
+      CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
+  // Place the view in the bottom right.
+  localVideoFrame.origin.x = CGRectGetMaxX(bounds)
+      - localVideoFrame.size.width - kLocalVideoViewPadding;
+  localVideoFrame.origin.y = CGRectGetMaxY(bounds)
+      - localVideoFrame.size.height - kLocalVideoViewPadding;
+  _localVideoView.frame = localVideoFrame;
 
   // Place stats at the top.
   CGSize statsSize = [_statsView sizeThatFits:bounds.size];
@@ -159,10 +148,7 @@
 #pragma mark - RTCEAGLVideoViewDelegate
 
 - (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size {
-  if (videoView == _localVideoView) {
-    _localVideoSize = size;
-    _localVideoView.hidden = CGSizeEqualToSize(CGSizeZero, _localVideoSize);
-  } else if (videoView == _remoteVideoView) {
+  if (videoView == _remoteVideoView) {
     _remoteVideoSize = size;
   }
   [self setNeedsLayout];
diff --git a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
index 8de6b95..51290a0 100644
--- a/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
+++ b/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
@@ -128,18 +128,21 @@
   if (_localVideoTrack == localVideoTrack) {
     return;
   }
-  [_localVideoTrack removeRenderer:_videoCallView.localVideoView];
   _localVideoTrack = nil;
-  [_videoCallView.localVideoView renderFrame:nil];
   _localVideoTrack = localVideoTrack;
-  [_localVideoTrack addRenderer:_videoCallView.localVideoView];
+  RTCAVFoundationVideoSource *source = nil;
+  if ([localVideoTrack.source
+          isKindOfClass:[RTCAVFoundationVideoSource class]]) {
+    source = (RTCAVFoundationVideoSource *)localVideoTrack.source;
+  }
+  _videoCallView.localVideoView.captureSession = source.captureSession;
 }
 
 - (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
   if (_remoteVideoTrack == remoteVideoTrack) {
     return;
   }
-  [_remoteVideoTrack removeRenderer:_videoCallView.localVideoView];
+  [_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
   _remoteVideoTrack = nil;
   [_videoCallView.remoteVideoView renderFrame:nil];
   _remoteVideoTrack = remoteVideoTrack;