CVO capturer feature: allow unrotated frames to flow through the capture pipeline.

split from https://webrtc-codereview.appspot.com/37029004/

This is based on the cleanup code change at https://webrtc-codereview.appspot.com/37129004
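
By default the capturer applies ("compensates") any pending rotation before
signaling a frame, so downstream code always receives an upright image. With
SetApplyRotation(false) the frame is delivered unrotated and carries the
pending rotation set via SetCaptureRotation, so a later stage (for example the
renderer, or the send path via the CVO RTP header extension) can compensate
instead. WebRtcVideoCapturer forwards the setting to the underlying
webrtc::VideoCaptureModule, which then skips the rotation in ConvertToI420 and
tags the delivered I420VideoFrame with the pending rotation.

For illustration only (not part of this change), a minimal sketch of a client
that opts out of in-capturer rotation; the sink class and its wiring are
hypothetical and assume the cricket::VideoCapturer interface added here:

  #include "talk/media/base/videocapturer.h"
  #include "talk/media/base/videoframe.h"

  // Hypothetical sink that handles the pending rotation itself.
  class RotationAwareSink : public sigslot::has_slots<> {
   public:
    explicit RotationAwareSink(cricket::VideoCapturer* capturer) {
      // Leave the rotation pending; signaled frames then carry the value set
      // through SetCaptureRotation instead of being rotated upright.
      capturer->SetApplyRotation(false);
      capturer->SignalVideoFrame.connect(this, &RotationAwareSink::OnFrame);
    }

    void OnFrame(cricket::VideoCapturer*, const cricket::VideoFrame* frame) {
      // GetVideoRotation() may now be non-zero; compensate at render time or
      // forward the value downstream.
      webrtc::VideoRotation rotation = frame->GetVideoRotation();
      (void)rotation;
    }
  };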

BUG=4145
R=perkj@webrtc.org, pthatcher@webrtc.org, stefan@webrtc.org, tommi@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/39799004

Cr-Commit-Position: refs/heads/master@{#8337}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8337 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/media/base/fakevideocapturer.h b/talk/media/base/fakevideocapturer.h
index e8d49c0..c9dce2c 100644
--- a/talk/media/base/fakevideocapturer.h
+++ b/talk/media/base/fakevideocapturer.h
@@ -49,7 +49,8 @@
       : running_(false),
         initial_unix_timestamp_(time(NULL) * rtc::kNumNanosecsPerSec),
         next_timestamp_(rtc::kNumNanosecsPerMillisec),
-        is_screencast_(false) {
+        is_screencast_(false),
+        rotation_(webrtc::kVideoRotation_0) {
 #ifdef HAVE_WEBRTC_VIDEO
     set_frame_factory(new cricket::WebRtcVideoFrameFactory());
 #endif
@@ -115,6 +116,7 @@
     memset(reinterpret_cast<uint8*>(frame.data) + (size / 2), 2,
          size - (size / 2));
     memcpy(frame.data, reinterpret_cast<const uint8*>(&fourcc), 4);
+    frame.rotation = rotation_;
     // TODO(zhurunz): SignalFrameCaptured carry returned value to be able to
     // capture results from downstream.
     SignalFrameCaptured(this, &frame);
@@ -148,11 +150,18 @@
     return true;
   }
 
+  void SetRotation(webrtc::VideoRotation rotation) {
+    rotation_ = rotation;
+  }
+
+  webrtc::VideoRotation GetRotation() { return rotation_; }
+
  private:
   bool running_;
   int64 initial_unix_timestamp_;
   int64 next_timestamp_;
   bool is_screencast_;
+  webrtc::VideoRotation rotation_;
 };
 
 }  // namespace cricket
diff --git a/talk/media/base/videocapturer.cc b/talk/media/base/videocapturer.cc
index d47c8f2..abef449 100644
--- a/talk/media/base/videocapturer.cc
+++ b/talk/media/base/videocapturer.cc
@@ -110,7 +110,8 @@
     : thread_(rtc::Thread::Current()),
       adapt_frame_drops_data_(kMaxAccumulatorSize),
       effect_frame_drops_data_(kMaxAccumulatorSize),
-      frame_time_data_(kMaxAccumulatorSize) {
+      frame_time_data_(kMaxAccumulatorSize),
+      apply_rotation_(true) {
   Construct();
 }
 
@@ -118,7 +119,8 @@
     : thread_(thread),
       adapt_frame_drops_data_(kMaxAccumulatorSize),
       effect_frame_drops_data_(kMaxAccumulatorSize),
-      frame_time_data_(kMaxAccumulatorSize) {
+      frame_time_data_(kMaxAccumulatorSize),
+      apply_rotation_(true) {
   Construct();
 }
 
@@ -254,6 +256,14 @@
   return Pause(false);
 }
 
+bool VideoCapturer::SetApplyRotation(bool enable) {
+  apply_rotation_ = enable;
+  if (frame_factory_) {
+    frame_factory_->SetApplyRotation(apply_rotation_);
+  }
+  return true;
+}
+
 void VideoCapturer::SetSupportedFormats(
     const std::vector<VideoFormat>& formats) {
   supported_formats_ = formats;
@@ -340,6 +350,13 @@
   return ss.str();
 }
 
+void VideoCapturer::set_frame_factory(VideoFrameFactory* frame_factory) {
+  frame_factory_.reset(frame_factory);
+  if (frame_factory) {
+    frame_factory->SetApplyRotation(apply_rotation_);
+  }
+}
+
 void VideoCapturer::GetStats(VariableInfo<int>* adapt_drops_stats,
                              VariableInfo<int>* effect_drops_stats,
                              VariableInfo<double>* frame_time_stats,
diff --git a/talk/media/base/videocapturer.h b/talk/media/base/videocapturer.h
index 55a8923..9e43aaa 100644
--- a/talk/media/base/videocapturer.h
+++ b/talk/media/base/videocapturer.h
@@ -225,6 +225,13 @@
     return capture_state_;
   }
 
+  // Tells the videocapturer whether to apply the pending rotation. By default,
+  // the rotation is applied and the generated frame is upright. When set to
+  // false, generated frames will carry the rotation information from
+  // SetCaptureRotation. The return value indicates whether the call succeeded.
+  virtual bool SetApplyRotation(bool enable);
+  virtual bool GetApplyRotation() { return apply_rotation_; }
+
   // Adds a video processor that will be applied on VideoFrames returned by
   // |SignalVideoFrame|. Multiple video processors can be added. The video
   // processors will be applied in the order they were added.
@@ -301,9 +308,7 @@
   }
 
   // Takes ownership.
-  void set_frame_factory(VideoFrameFactory* frame_factory) {
-    frame_factory_.reset(frame_factory);
-  }
+  void set_frame_factory(VideoFrameFactory* frame_factory);
 
   // Gets statistics for tracked variables recorded since the last call to
   // GetStats.  Note that calling GetStats resets any gathered data so it
@@ -412,6 +417,9 @@
   rtc::CriticalSection crit_;
   VideoProcessors video_processors_;
 
+  // Whether the capturer should apply rotation before signaling the frame.
+  bool apply_rotation_;
+
   DISALLOW_COPY_AND_ASSIGN(VideoCapturer);
 };
 
diff --git a/talk/media/base/videocapturer_unittest.cc b/talk/media/base/videocapturer_unittest.cc
index 1760d8a..4f6739e 100644
--- a/talk/media/base/videocapturer_unittest.cc
+++ b/talk/media/base/videocapturer_unittest.cc
@@ -83,16 +83,26 @@
       : capture_state_(cricket::CS_STOPPED),
         num_state_changes_(0),
         video_frames_received_(0),
-        last_frame_elapsed_time_(0) {
+        last_frame_elapsed_time_(0),
+        expects_rotation_applied_(true) {
     capturer_.SignalVideoFrame.connect(this, &VideoCapturerTest::OnVideoFrame);
     capturer_.SignalStateChange.connect(this,
                                         &VideoCapturerTest::OnStateChange);
   }
 
+  void set_expected_compensation(bool compensation) {
+    expects_rotation_applied_ = compensation;
+  }
+
  protected:
   void OnVideoFrame(cricket::VideoCapturer*, const cricket::VideoFrame* frame) {
     ++video_frames_received_;
     last_frame_elapsed_time_ = frame->GetElapsedTime();
+    if (expects_rotation_applied_) {
+      EXPECT_EQ(webrtc::kVideoRotation_0, frame->GetRotation());
+    } else {
+      EXPECT_EQ(capturer_.GetRotation(), frame->GetRotation());
+    }
     renderer_.RenderFrame(frame);
   }
   void OnStateChange(cricket::VideoCapturer*,
@@ -113,6 +123,7 @@
   int video_frames_received_;
   int64 last_frame_elapsed_time_;
   cricket::FakeVideoRenderer renderer_;
+  bool expects_rotation_applied_;
 };
 
 TEST_F(VideoCapturerTest, CaptureState) {
@@ -276,6 +287,107 @@
   EXPECT_EQ(1, renderer_.num_rendered_frames());
 }
 
+TEST_F(VideoCapturerTest, TestRotationPending) {
+  int kWidth = 800;
+  int kHeight = 400;
+  int frame_count = 0;
+
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+                                         cricket::VideoFormat::FpsToInterval(5),
+                                         cricket::FOURCC_I420));
+
+  capturer_.ResetSupportedFormats(formats);
+  // capturer_ should compensate for rotation by default.
+  capturer_.UpdateAspectRatio(400, 200);
+
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer_.Start(cricket::VideoFormat(
+                kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+                cricket::FOURCC_I420)));
+  EXPECT_TRUE(capturer_.IsRunning());
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+
+  // If the frame's rotation is compensated anywhere in the pipeline based on
+  // the rotation information, the renderer should be given the correct
+  // dimensions so that the frame can be rendered.
+
+  // Swap the dimensions for the next two frames, which are rotated by 90 and
+  // 270 degrees.
+  int expected_width = kHeight;
+  int expected_height = kWidth;
+  NormalizeVideoSize(&expected_width, &expected_height);
+  renderer_.SetSize(expected_width, expected_height, 0);
+
+  capturer_.SetRotation(webrtc::kVideoRotation_90);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+
+  capturer_.SetRotation(webrtc::kVideoRotation_270);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+
+  // Reset the renderer to the unrotated width and height.
+  expected_width = kWidth;
+  expected_height = kHeight;
+  NormalizeVideoSize(&expected_width, &expected_height);
+  renderer_.SetSize(expected_width, expected_height, 0);
+
+  capturer_.SetRotation(webrtc::kVideoRotation_180);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+}
+
+TEST_F(VideoCapturerTest, TestRotationApplied) {
+  int kWidth = 800;
+  int kHeight = 400;
+
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+                                         cricket::VideoFormat::FpsToInterval(5),
+                                         cricket::FOURCC_I420));
+
+  capturer_.ResetSupportedFormats(formats);
+  // capturer_ should not compensate for rotation.
+  capturer_.SetApplyRotation(false);
+  capturer_.UpdateAspectRatio(400, 200);
+  set_expected_compensation(false);
+
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer_.Start(cricket::VideoFormat(
+                kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+                cricket::FOURCC_I420)));
+  EXPECT_TRUE(capturer_.IsRunning());
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+
+  int expected_width = kWidth;
+  int expected_height = kHeight;
+  NormalizeVideoSize(&expected_width, &expected_height);
+  renderer_.SetSize(expected_width, expected_height, 0);
+
+  // If the frame's rotation is compensated anywhere in the pipeline, the frame
+  // coming out of the capturer won't have its original dimensions. Since the
+  // renderer here uses the same dimensions as the capturer, it will skip such
+  // frames because the resolutions no longer match.
+
+  int frame_count = 0;
+  capturer_.SetRotation(webrtc::kVideoRotation_0);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+
+  capturer_.SetRotation(webrtc::kVideoRotation_90);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+
+  capturer_.SetRotation(webrtc::kVideoRotation_180);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+
+  capturer_.SetRotation(webrtc::kVideoRotation_270);
+  EXPECT_TRUE(capturer_.CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+}
+
 TEST_F(VideoCapturerTest, ScreencastScaledSuperLarge) {
   capturer_.SetScreencast(true);
 
diff --git a/talk/media/base/videoframe.h b/talk/media/base/videoframe.h
index 3496840..7c5ce5b 100644
--- a/talk/media/base/videoframe.h
+++ b/talk/media/base/videoframe.h
@@ -118,11 +118,13 @@
 
   // Indicates the rotation angle in degrees.
   // TODO(guoweis): Remove this function, rename GetVideoRotation and remove the
-  // skeleton implementation to GetRotation once chrome is updated.
+  // skeleton implementation of GetRotation once chrome is updated.
   virtual int GetRotation() const { return GetVideoRotation(); }
   virtual webrtc::VideoRotation GetVideoRotation() const {
     return webrtc::kVideoRotation_0;
   }
+  // TODO(guoweis): Remove the skeleton implementation once chrome is updated.
+  virtual void SetRotation(webrtc::VideoRotation rotation) {}
 
   // Make a shallow copy of the frame. The frame buffer itself is not copied.
   // Both the current and new VideoFrame will share a single reference-counted
diff --git a/talk/media/base/videoframefactory.h b/talk/media/base/videoframefactory.h
index 45e0831..b0dbf16 100644
--- a/talk/media/base/videoframefactory.h
+++ b/talk/media/base/videoframefactory.h
@@ -40,7 +40,7 @@
 // depending on the subclass of VideoFrameFactory.
 class VideoFrameFactory {
  public:
-  VideoFrameFactory() {}
+  VideoFrameFactory() : apply_rotation_(true) {}
   virtual ~VideoFrameFactory() {}
 
   // The returned frame aliases the aliased_frame if the input color
@@ -65,6 +65,11 @@
                                          int output_width,
                                          int output_height) const;
 
+  void SetApplyRotation(bool enable) { apply_rotation_ = enable; }
+
+ protected:
+  bool apply_rotation_;
+
  private:
   // An internal frame buffer to avoid reallocations. It is mutable because it
   // does not affect behaviour, only performance.
diff --git a/talk/media/webrtc/fakewebrtcvideocapturemodule.h b/talk/media/webrtc/fakewebrtcvideocapturemodule.h
index 7085a3d..55a5157 100644
--- a/talk/media/webrtc/fakewebrtcvideocapturemodule.h
+++ b/talk/media/webrtc/fakewebrtcvideocapturemodule.h
@@ -100,6 +100,12 @@
       webrtc::VideoCaptureRotation rotation) OVERRIDE {
     return -1;  // not implemented
   }
+  virtual bool SetApplyRotation(bool enable) OVERRIDE {
+    return false;  // not implemented
+  }
+  virtual bool GetApplyRotation() OVERRIDE {
+    return true;  // Rotation compensation is turned on.
+  }
   virtual VideoCaptureEncodeInterface* GetEncodeInterface(
       const webrtc::VideoCodec& codec) OVERRIDE {
     return NULL;  // not implemented
diff --git a/talk/media/webrtc/webrtcvideocapturer.cc b/talk/media/webrtc/webrtcvideocapturer.cc
index c86abcc..259f53a 100644
--- a/talk/media/webrtc/webrtcvideocapturer.cc
+++ b/talk/media/webrtc/webrtcvideocapturer.cc
@@ -213,6 +213,10 @@
   module_->AddRef();
   SetId(device.id);
   SetSupportedFormats(supported);
+
+  // Ensure the capturer and the module agree on whether rotation is applied.
+  SetApplyRotation(module_->GetApplyRotation());
+
   return true;
 }
 
@@ -249,6 +253,16 @@
   }
   return true;
 }
+bool WebRtcVideoCapturer::SetApplyRotation(bool enable) {
+  rtc::CritScope cs(&critical_section_stopping_);
+
+  assert(module_);
+
+  if (!VideoCapturer::SetApplyRotation(enable)) {
+    return false;
+  }
+  return module_->SetApplyRotation(enable);
+}
 
 CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
   if (!module_) {
@@ -381,6 +395,7 @@
   time_stamp = elapsed_time;
   data_size = rtc::checked_cast<uint32>(length);
   data = buffer;
+  rotation = sample.rotation();
 }
 
 }  // namespace cricket
diff --git a/talk/media/webrtc/webrtcvideocapturer.h b/talk/media/webrtc/webrtcvideocapturer.h
index 631aa17..e7d870b 100644
--- a/talk/media/webrtc/webrtcvideocapturer.h
+++ b/talk/media/webrtc/webrtcvideocapturer.h
@@ -72,6 +72,7 @@
   virtual void Stop();
   virtual bool IsRunning();
   virtual bool IsScreencast() const { return false; }
+  virtual bool SetApplyRotation(bool enable);
 
  protected:
   // Override virtual methods of the parent class VideoCapturer.
diff --git a/talk/media/webrtc/webrtcvideoengine.cc b/talk/media/webrtc/webrtcvideoengine.cc
index 56ee28c..7fbb8de 100644
--- a/talk/media/webrtc/webrtcvideoengine.cc
+++ b/talk/media/webrtc/webrtcvideoengine.cc
@@ -394,6 +394,24 @@
     if (!renderer_) {
       return 0;
     }
+    if (!webrtc_frame->native_handle()) {
+      WebRtcVideoRenderFrame cricket_frame(webrtc_frame, elapsed_time_ms);
+      return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
+    } else {
+      return DeliverTextureFrame(
+          webrtc_frame->native_handle(),
+          webrtc_frame->render_time_ms() * rtc::kNumNanosecsPerMillisec,
+          elapsed_time_ms * rtc::kNumNanosecsPerMillisec);
+    }
+  }
+
+  virtual bool IsTextureSupported() { return true; }
+
+  int DeliverBufferFrame(unsigned char* buffer, size_t buffer_size,
+                         int64 time_stamp, int64 elapsed_time) {
+    WebRtcVideoFrame video_frame;
+    video_frame.Alias(buffer, buffer_size, width_, height_, 1, 1, elapsed_time,
+                      time_stamp, webrtc::kVideoRotation_0);
 
     WebRtcVideoRenderFrame cricket_frame(&webrtc_frame, elapsed_time_ms);
     return renderer_->RenderFrame(&cricket_frame) ? 0 : -1;
diff --git a/talk/media/webrtc/webrtcvideoframe.cc b/talk/media/webrtc/webrtcvideoframe.cc
index e99930c..6a49d9b 100644
--- a/talk/media/webrtc/webrtcvideoframe.cc
+++ b/talk/media/webrtc/webrtcvideoframe.cc
@@ -139,9 +139,13 @@
                frame->time_stamp, frame->GetRotation());
 }
 
-bool WebRtcVideoFrame::Alias(const CapturedFrame* frame, int dw, int dh) {
+bool WebRtcVideoFrame::Alias(const CapturedFrame* frame,
+                             int dw,
+                             int dh,
+                             bool apply_rotation) {
   if (CanonicalFourCC(frame->fourcc) != FOURCC_I420 ||
-      (frame->GetRotation() != webrtc::kVideoRotation_0) ||
+      (apply_rotation &&
+       frame->GetRotation() != webrtc::kVideoRotation_0) ||
       frame->width != dw || frame->height != dh) {
     // TODO(fbarchard): Enable aliasing of more formats.
     return Init(frame, dw, dh);
diff --git a/talk/media/webrtc/webrtcvideoframe.h b/talk/media/webrtc/webrtcvideoframe.h
index 2493026..4e89897 100644
--- a/talk/media/webrtc/webrtcvideoframe.h
+++ b/talk/media/webrtc/webrtcvideoframe.h
@@ -72,7 +72,10 @@
 
   // Aliases this WebRtcVideoFrame to a CapturedFrame. |frame| must outlive
   // this WebRtcVideoFrame.
-  bool Alias(const CapturedFrame* frame, int dw, int dh);
+  bool Alias(const CapturedFrame* frame,
+             int dw,
+             int dh,
+             bool apply_rotation);
 
   bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
                    int64_t elapsed_time, int64_t time_stamp);
@@ -129,6 +132,9 @@
   virtual void SetTimeStamp(int64_t time_stamp) { time_stamp_ = time_stamp; }
 
   virtual webrtc::VideoRotation GetVideoRotation() const { return rotation_; }
+  virtual void SetRotation(webrtc::VideoRotation rotation) {
+    rotation_ = rotation;
+  }
 
   virtual VideoFrame* Copy() const;
   virtual bool MakeExclusive();
diff --git a/talk/media/webrtc/webrtcvideoframefactory.cc b/talk/media/webrtc/webrtcvideoframefactory.cc
index 07b6663..2ac5adc 100755
--- a/talk/media/webrtc/webrtcvideoframefactory.cc
+++ b/talk/media/webrtc/webrtcvideoframefactory.cc
@@ -36,7 +36,7 @@
   // TODO(pthatcher): Move Alias logic into the VideoFrameFactory and
   // out of the VideoFrame.
   rtc::scoped_ptr<WebRtcVideoFrame> frame(new WebRtcVideoFrame());
-  if (!frame->Alias(aliased_frame, width, height)) {
+  if (!frame->Alias(aliased_frame, width, height, apply_rotation_)) {
     LOG(LS_ERROR) <<
         "Failed to create WebRtcVideoFrame in CreateAliasedFrame.";
     return NULL;
diff --git a/webrtc/common_video/i420_video_frame.cc b/webrtc/common_video/i420_video_frame.cc
index fdc2bbc..8dbbe3a 100644
--- a/webrtc/common_video/i420_video_frame.cc
+++ b/webrtc/common_video/i420_video_frame.cc
@@ -21,7 +21,9 @@
       height_(0),
       timestamp_(0),
       ntp_time_ms_(0),
-      render_time_ms_(0) {}
+      render_time_ms_(0),
+      rotation_(kVideoRotation_0) {
+}
 
 I420VideoFrame::~I420VideoFrame() {}
 
@@ -42,6 +44,7 @@
   timestamp_ = 0;
   ntp_time_ms_ = 0;
   render_time_ms_ = 0;
+  rotation_ = kVideoRotation_0;
   return 0;
 }
 
@@ -50,6 +53,23 @@
                                 int size_v, const uint8_t* buffer_v,
                                 int width, int height,
                                 int stride_y, int stride_u, int stride_v) {
+  return CreateFrame(size_y, buffer_y, size_u, buffer_u, size_v, buffer_v,
+                     width, height, stride_y, stride_u, stride_v,
+                     kVideoRotation_0);
+}
+
+int I420VideoFrame::CreateFrame(int size_y,
+                                const uint8_t* buffer_y,
+                                int size_u,
+                                const uint8_t* buffer_u,
+                                int size_v,
+                                const uint8_t* buffer_v,
+                                int width,
+                                int height,
+                                int stride_y,
+                                int stride_u,
+                                int stride_v,
+                                VideoRotation rotation) {
   if (size_y < 1 || size_u < 1 || size_v < 1)
     return -1;
   if (CheckDimensions(width, height, stride_y, stride_u, stride_v) < 0)
@@ -59,6 +79,7 @@
   v_plane_.Copy(size_v, stride_v, buffer_v);
   width_ = width;
   height_ = height;
+  rotation_ = rotation;
   return 0;
 }
 
@@ -77,6 +98,7 @@
   timestamp_ = videoFrame.timestamp_;
   ntp_time_ms_ = videoFrame.ntp_time_ms_;
   render_time_ms_ = videoFrame.render_time_ms_;
+  rotation_ = videoFrame.rotation_;
   return 0;
 }
 
@@ -98,6 +120,7 @@
   std::swap(timestamp_, videoFrame->timestamp_);
   std::swap(ntp_time_ms_, videoFrame->ntp_time_ms_);
   std::swap(render_time_ms_, videoFrame->render_time_ms_);
+  std::swap(rotation_, videoFrame->rotation_);
 }
 
 uint8_t* I420VideoFrame::buffer(PlaneType type) {
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc
index ca01fd0..6c3cb9c 100644
--- a/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/webrtc/common_video/i420_video_frame_unittest.cc
@@ -29,6 +29,7 @@
   I420VideoFrame frame;
   // Invalid arguments - one call for each variable.
   EXPECT_TRUE(frame.IsZeroSize());
+  EXPECT_EQ(kVideoRotation_0, frame.rotation());
   EXPECT_EQ(-1, frame.CreateEmptyFrame(0, 10, 10, 14, 14));
   EXPECT_EQ(-1, frame.CreateEmptyFrame(10, -1, 10, 90, 14));
   EXPECT_EQ(-1, frame.CreateEmptyFrame(10, 10, 0, 14, 18));
@@ -100,22 +101,23 @@
   const int kSizeY = 225;
   const int kSizeU = 80;
   const int kSizeV = 80;
+  const VideoRotation kRotation = kVideoRotation_270;
   uint8_t buffer_y[kSizeY];
   uint8_t buffer_u[kSizeU];
   uint8_t buffer_v[kSizeV];
   memset(buffer_y, 16, kSizeY);
   memset(buffer_u, 8, kSizeU);
   memset(buffer_v, 4, kSizeV);
-  frame2.CreateFrame(kSizeY, buffer_y,
-                     kSizeU, buffer_u,
-                     kSizeV, buffer_v,
-                     width + 5, height + 5, stride_y + 5, stride_u, stride_v);
+  frame2.CreateFrame(kSizeY, buffer_y, kSizeU, buffer_u, kSizeV, buffer_v,
+                     width + 5, height + 5, stride_y + 5, stride_u, stride_v,
+                     kRotation);
   // Frame of smaller dimensions - allocated sizes should not vary.
   EXPECT_EQ(0, frame1.CopyFrame(frame2));
   EXPECT_TRUE(EqualFramesExceptSize(frame1, frame2));
   EXPECT_EQ(kSizeY, frame1.allocated_size(kYPlane));
   EXPECT_EQ(kSizeU, frame1.allocated_size(kUPlane));
   EXPECT_EQ(kSizeV, frame1.allocated_size(kVPlane));
+  EXPECT_EQ(kRotation, frame1.rotation());
   // Verify copy of all parameters.
   // Frame of larger dimensions - update allocated sizes.
   EXPECT_EQ(0, frame2.CopyFrame(frame1));
diff --git a/webrtc/common_video/interface/texture_video_frame.h b/webrtc/common_video/interface/texture_video_frame.h
index 2c625ab..9a1fee0 100644
--- a/webrtc/common_video/interface/texture_video_frame.h
+++ b/webrtc/common_video/interface/texture_video_frame.h
@@ -48,6 +48,18 @@
                           int stride_y,
                           int stride_u,
                           int stride_v) OVERRIDE;
+  virtual int CreateFrame(int size_y,
+                          const uint8_t* buffer_y,
+                          int size_u,
+                          const uint8_t* buffer_u,
+                          int size_v,
+                          const uint8_t* buffer_v,
+                          int width,
+                          int height,
+                          int stride_y,
+                          int stride_u,
+                          int stride_v,
+                          webrtc::VideoRotation rotation) OVERRIDE;
   virtual int CopyFrame(const I420VideoFrame& videoFrame) OVERRIDE;
   virtual I420VideoFrame* CloneFrame() const OVERRIDE;
   virtual void SwapFrame(I420VideoFrame* videoFrame) OVERRIDE;
diff --git a/webrtc/common_video/rotation.h b/webrtc/common_video/rotation.h
index 61825d8..46a9ecc 100644
--- a/webrtc/common_video/rotation.h
+++ b/webrtc/common_video/rotation.h
@@ -11,8 +11,6 @@
 #ifndef WEBRTC_COMMON_VIDEO_ROTATION_H_
 #define WEBRTC_COMMON_VIDEO_ROTATION_H_
 
-#include "webrtc/base/common.h"
-
 namespace webrtc {
 
 // enum for clockwise rotation.
diff --git a/webrtc/common_video/texture_video_frame.cc b/webrtc/common_video/texture_video_frame.cc
index f301d19..ce6a040 100644
--- a/webrtc/common_video/texture_video_frame.cc
+++ b/webrtc/common_video/texture_video_frame.cc
@@ -52,6 +52,22 @@
   return -1;
 }
 
+int TextureVideoFrame::CreateFrame(int size_y,
+                                   const uint8_t* buffer_y,
+                                   int size_u,
+                                   const uint8_t* buffer_u,
+                                   int size_v,
+                                   const uint8_t* buffer_v,
+                                   int width,
+                                   int height,
+                                   int stride_y,
+                                   int stride_u,
+                                   int stride_v,
+                                   webrtc::VideoRotation rotation) {
+  assert(false);  // Should not be called.
+  return -1;
+}
+
 int TextureVideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
   assert(false);  // Should not be called.
   return -1;
diff --git a/webrtc/modules/video_capture/include/mock/mock_video_capture.h b/webrtc/modules/video_capture/include/mock/mock_video_capture.h
index 7938b01..9013820 100644
--- a/webrtc/modules/video_capture/include/mock/mock_video_capture.h
+++ b/webrtc/modules/video_capture/include/mock/mock_video_capture.h
@@ -39,6 +39,8 @@
   MOCK_METHOD1(SetCaptureDelay, void(int32_t delayMS));
   MOCK_METHOD0(CaptureDelay, int32_t());
   MOCK_METHOD1(SetCaptureRotation, int32_t(VideoCaptureRotation rotation));
+  MOCK_METHOD1(SetApplyRotation, bool(bool));
+  MOCK_METHOD0(GetApplyRotation, bool());
   MOCK_METHOD1(GetEncodeInterface,
                VideoCaptureEncodeInterface*(const VideoCodec& codec));
   MOCK_METHOD1(EnableFrameRateCallback, void(const bool enable));
diff --git a/webrtc/modules/video_capture/include/video_capture.h b/webrtc/modules/video_capture/include/video_capture.h
index 50539ea..4f94317 100644
--- a/webrtc/modules/video_capture/include/video_capture.h
+++ b/webrtc/modules/video_capture/include/video_capture.h
@@ -135,6 +135,15 @@
   // displayed correctly if rendered.
   virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation) = 0;
 
+  // Tells the capture module whether to apply the pending rotation. By
+  // default, the rotation is applied and the generated frame is upright. When
+  // set to false, generated frames will carry the rotation information from
+  // SetCaptureRotation. The return value indicates whether the call succeeded.
+  virtual bool SetApplyRotation(bool enable) = 0;
+
+  // Returns whether the rotation is applied or left pending.
+  virtual bool GetApplyRotation() = 0;
+
   // Gets a pointer to an encode interface if the capture device supports the
   // requested type and size.  NULL otherwise.
   virtual VideoCaptureEncodeInterface* GetEncodeInterface(
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index d7a6642..ec88ecd 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -24,6 +24,23 @@
 
 namespace webrtc
 {
+
+// Converts the capture module's rotation mode to the I420VideoFrame define.
+VideoRotation ConvertRotation(VideoRotationMode rotation) {
+  switch (rotation) {
+    case kRotateNone:
+      return kVideoRotation_0;
+    case kRotate90:
+      return kVideoRotation_90;
+    case kRotate180:
+      return kVideoRotation_180;
+    case kRotate270:
+      return kVideoRotation_270;
+  }
+  assert(false);
+  return kVideoRotation_0;
+}
+
 namespace videocapturemodule
 {
 VideoCaptureModule* VideoCaptureImpl::Create(
@@ -159,7 +176,8 @@
       last_capture_time_(0),
       delta_ntp_internal_ms_(
           Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
-          TickTime::MillisecondTimestamp()) {
+          TickTime::MillisecondTimestamp()),
+      apply_rotation_(true) {
     _requestedCapability.width = kDefaultWidth;
     _requestedCapability.height = kDefaultHeight;
     _requestedCapability.maxFPS = 30;
@@ -278,11 +296,15 @@
         int stride_uv = (width + 1) / 2;
         int target_width = width;
         int target_height = height;
-        // Rotating resolution when for 90/270 degree rotations.
-        if (_rotateFrame == kRotate90 || _rotateFrame == kRotate270)  {
-          target_width = abs(height);
-          target_height = width;
+
+        if (apply_rotation_) {
+          // Swap width and height for 90/270 degree rotations.
+          if (_rotateFrame == kRotate90 || _rotateFrame == kRotate270) {
+            target_width = abs(height);
+            target_height = width;
+          }
         }
+
         // TODO(mikhal): Update correct aligned stride values.
         //Calc16ByteAlignedStride(target_width, &stride_y, &stride_uv);
         // Setting absolute height (in case it was negative).
@@ -298,19 +320,24 @@
                              "happen due to bad parameters.";
             return -1;
         }
-        const int conversionResult = ConvertToI420(commonVideoType,
-                                                   videoFrame,
-                                                   0, 0,  // No cropping
-                                                   width, height,
-                                                   videoFrameLength,
-                                                   _rotateFrame,
-                                                   &_captureFrame);
+        const int conversionResult = ConvertToI420(
+            commonVideoType, videoFrame, 0, 0,  // No cropping
+            width, height, videoFrameLength,
+            apply_rotation_ ? _rotateFrame : kRotateNone,
+            &_captureFrame);
         if (conversionResult < 0)
         {
           LOG(LS_ERROR) << "Failed to convert capture frame from type "
                         << frameInfo.rawType << "to I420.";
             return -1;
         }
+
+        if (!apply_rotation_) {
+          _captureFrame.set_rotation(ConvertRotation(_rotateFrame));
+        } else {
+          _captureFrame.set_rotation(kVideoRotation_0);
+        }
+
         DeliverCapturedFrame(_captureFrame, captureTime);
     }
     else // Encoded format
@@ -364,6 +391,14 @@
     }
 }
 
+bool VideoCaptureImpl::SetApplyRotation(bool enable) {
+  CriticalSectionScoped cs(&_apiCs);
+  CriticalSectionScoped cs2(&_callBackCs);
+  // If there are multiple callers, the last one wins.
+  apply_rotation_ = enable;
+  return true;
+}
+
 void VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) {
     CriticalSectionScoped cs(&_apiCs);
     CriticalSectionScoped cs2(&_callBackCs);
diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h
index 75fe7e5..aba15f2 100644
--- a/webrtc/modules/video_capture/video_capture_impl.h
+++ b/webrtc/modules/video_capture/video_capture_impl.h
@@ -68,6 +68,10 @@
     virtual void SetCaptureDelay(int32_t delayMS);
     virtual int32_t CaptureDelay();
     virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
+    virtual bool SetApplyRotation(bool enable);
+    virtual bool GetApplyRotation() {
+      return apply_rotation_;
+    }
 
     virtual void EnableFrameRateCallback(const bool enable);
     virtual void EnableNoPictureAlarm(const bool enable);
@@ -140,6 +144,9 @@
 
     // Delta used for translating between NTP and internal timestamps.
     const int64_t delta_ntp_internal_ms_;
+
+    // Whether rotation is applied before frames are delivered externally.
+    bool apply_rotation_;
 };
 }  // namespace videocapturemodule
 }  // namespace webrtc
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index 95bcca0..2dd7c61 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -18,6 +18,7 @@
 // used).
 #include "webrtc/system_wrappers/interface/scoped_refptr.h"
 #include "webrtc/typedefs.h"
+#include "webrtc/common_video/rotation.h"
 
 namespace webrtc {
 
@@ -73,6 +74,20 @@
                           int stride_u,
                           int stride_v);
 
+  // TODO(guoweis): Remove the previous CreateFrame once Chromium has this code.
+  virtual int CreateFrame(int size_y,
+                          const uint8_t* buffer_y,
+                          int size_u,
+                          const uint8_t* buffer_u,
+                          int size_v,
+                          const uint8_t* buffer_v,
+                          int width,
+                          int height,
+                          int stride_y,
+                          int stride_u,
+                          int stride_v,
+                          VideoRotation rotation);
+
   // Copy frame: If required size is bigger than allocated one, new buffers of
   // adequate size will be allocated.
   // Return value: 0 on success, -1 on error.
@@ -122,6 +137,21 @@
   // Get capture ntp time in miliseconds.
   virtual int64_t ntp_time_ms() const { return ntp_time_ms_; }
 
+  // Naming convention for Coordination of Video Orientation. Please see
+  // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf
+  //
+  // "pending rotation" or "pending" = a frame that has a VideoRotation > 0.
+  //
+  // "not pending" = a frame that has a VideoRotation == 0.
+  //
+  // "apply rotation" = rotate a frame so that it goes from "pending" to "not
+  //                    pending" (a no-op for frames that are already unrotated).
+  //
+  virtual VideoRotation rotation() const { return rotation_; }
+  virtual void set_rotation(VideoRotation rotation) {
+    rotation_ = rotation;
+  }
+
   // Set render time in miliseconds.
   virtual void set_render_time_ms(int64_t render_time_ms) {
     render_time_ms_ = render_time_ms;
@@ -165,6 +195,7 @@
   uint32_t timestamp_;
   int64_t ntp_time_ms_;
   int64_t render_time_ms_;
+  VideoRotation rotation_;
 };
 
 enum VideoFrameType {