Remove the AVI recorder/player and the corresponding enable_video flags.

This deletes the AviFile reader/writer, the video-specific MediaFile
interface methods (StartPlayingVideoFile, StartRecordingVideoFile,
PlayoutAVIVideoData, IncomingAVIVideoData, VideoCodecInst) and the
kFileFormatAviFile enum value, together with the enable_video GYP
variable, the rtc_enable_video GN argument and the
WEBRTC_MODULE_UTILITY_VIDEO define that they controlled.

R=mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/42099004

Cr-Commit-Position: refs/heads/master@{#8554}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8554 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/webrtc/BUILD.gn b/webrtc/BUILD.gn
index 296d788..04dd0f3 100644
--- a/webrtc/BUILD.gn
+++ b/webrtc/BUILD.gn
@@ -83,10 +83,6 @@
     all_dependent_configs = [ "dbus-glib" ]
   }
 
-  if (rtc_enable_video) {
-    defines += [ "WEBRTC_MODULE_UTILITY_VIDEO" ]
-  }
-
   if (build_with_chromium) {
     defines += [ "LOGGING_INSIDE_WEBRTC" ]
   } else {
diff --git a/webrtc/build/common.gypi b/webrtc/build/common.gypi
index 1472ebf..7369e1d 100644
--- a/webrtc/build/common.gypi
+++ b/webrtc/build/common.gypi
@@ -73,11 +73,6 @@
     # Remote bitrate estimator logging/plotting.
     'enable_bwe_test_logging%': 0,
 
-    # Adds video support to dependencies shared by voice and video engine.
-    # This should normally be enabled; the intended use is to disable only
-    # when building voice engine exclusively.
-    'enable_video%': 1,
-
     # Selects fixed-point code where possible.
     'prefer_fixed_point%': 0,
 
@@ -188,9 +183,6 @@
       ['rtc_relative_path==1', {
         'defines': ['EXPAT_RELATIVE_PATH',],
       }],
-      ['enable_video==1', {
-        'defines': ['WEBRTC_MODULE_UTILITY_VIDEO',],
-      }],
       ['build_with_chromium==1', {
         'defines': [
           # Changes settings for Chromium build.
diff --git a/webrtc/build/webrtc.gni b/webrtc/build/webrtc.gni
index 7577589..85caa55 100644
--- a/webrtc/build/webrtc.gni
+++ b/webrtc/build/webrtc.gni
@@ -26,11 +26,6 @@
   # library that comes with WebRTC (i.e. rtc_build_ssl == 0).
   rtc_ssl_root = ""
 
-  # Adds video support to dependencies shared by voice and video engine.
-  # This should normally be enabled; the intended use is to disable only
-  # when building voice engine exclusively.
-  rtc_enable_video = true
-
   # Selects fixed-point code where possible.
   rtc_prefer_fixed_point = false
 
diff --git a/webrtc/common_types.h b/webrtc/common_types.h
index ece5143..fbf9a6d 100644
--- a/webrtc/common_types.h
+++ b/webrtc/common_types.h
@@ -137,7 +137,6 @@
 {
     kFileFormatWavFile        = 1,
     kFileFormatCompressedFile = 2,
-    kFileFormatAviFile        = 3,
     kFileFormatPreencodedFile = 4,
     kFileFormatPcm16kHzFile   = 7,
     kFileFormatPcm8kHzFile    = 8,
diff --git a/webrtc/modules/audio_device/audio_device_buffer.h b/webrtc/modules/audio_device/audio_device_buffer.h
index 84df559..a89927f 100644
--- a/webrtc/modules/audio_device/audio_device_buffer.h
+++ b/webrtc/modules/audio_device/audio_device_buffer.h
@@ -22,7 +22,6 @@
 const uint32_t kMaxBufferSizeBytes = 3840; // 10ms in stereo @ 96kHz
 
 class AudioDeviceObserver;
-class MediaFile;
 
 class AudioDeviceBuffer
 {
diff --git a/webrtc/modules/media_file/BUILD.gn b/webrtc/modules/media_file/BUILD.gn
index 9cb15ff..05cfb4e 100644
--- a/webrtc/modules/media_file/BUILD.gn
+++ b/webrtc/modules/media_file/BUILD.gn
@@ -16,8 +16,6 @@
   sources = [
     "interface/media_file.h",
     "interface/media_file_defines.h",
-    "source/avi_file.cc",
-    "source/avi_file.h",
     "source/media_file_impl.cc",
     "source/media_file_impl.h",
     "source/media_file_utility.cc",
diff --git a/webrtc/modules/media_file/interface/media_file.h b/webrtc/modules/media_file/interface/media_file.h
index 0179ee6..5b09ad4 100644
--- a/webrtc/modules/media_file/interface/media_file.h
+++ b/webrtc/modules/media_file/interface/media_file.h
@@ -38,14 +38,6 @@
         int8_t* audioBuffer,
         size_t& dataLengthInBytes) = 0;
 
-    // Put one video frame into videoBuffer. dataLengthInBytes is both an input
-    // and output parameter. As input parameter it indicates the size of
-    // videoBuffer. As output parameter it indicates the number of bytes written
-    // to videoBuffer.
-    virtual int32_t PlayoutAVIVideoData(
-        int8_t* videoBuffer,
-        size_t& dataLengthInBytes) = 0;
-
     // Put 10-60ms, depending on codec frame size, of audio data from file into
     // audioBufferLeft and audioBufferRight. The buffers contain the left and
     // right channel of played out stereo audio.
@@ -82,16 +74,6 @@
         const uint32_t startPointMs       = 0,
         const uint32_t stopPointMs        = 0) = 0;
 
-    // Open the file specified by fileName for reading (relative path is
-    // allowed). If loop is true the file will be played until StopPlaying() is
-    // called. When end of file is reached the file is read from the start.
-    // format specifies the type of file fileName refers to. Only video will be
-    // read if videoOnly is true.
-    virtual int32_t StartPlayingVideoFile(const char* fileName,
-                                                const bool loop,
-                                                bool videoOnly,
-                                                const FileFormats format) = 0;
-
     // Prepare for playing audio from stream.
     // FileCallback::PlayNotification(..) will be called after
     // notificationTimeMs of the file has been played if notificationTimeMs is
@@ -130,16 +112,6 @@
         const int8_t* audioBuffer,
         const size_t bufferLength) = 0;
 
-    // Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
-    // to file.
-    // Note: videoBuffer can contain encoded data. The codec used must be the
-    // same as what was specified by videoCodecInst for the last successfull
-    // StartRecordingVideoFile(..) call. The videoBuffer must contain exactly
-    // one video frame.
-    virtual int32_t IncomingAVIVideoData(
-        const int8_t* videoBuffer,
-        const size_t bufferLength) = 0;
-
     // Open/creates file specified by fileName for writing (relative path is
     // allowed). FileCallback::RecordNotification(..) will be called after
     // notificationTimeMs of audio data has been recorded if
@@ -157,18 +129,6 @@
         const uint32_t notificationTimeMs = 0,
         const uint32_t maxSizeBytes       = 0) = 0;
 
-    // Open/create the file specified by fileName for writing audio/video data
-    // (relative path is allowed). format specifies the type of file fileName
-    // should be. codecInst specifies the encoding of the audio data.
-    // videoCodecInst specifies the encoding of the video data. Only video data
-    // will be recorded if videoOnly is true.
-    virtual int32_t StartRecordingVideoFile(
-        const char* fileName,
-        const FileFormats   format,
-        const CodecInst&    codecInst,
-        const VideoCodec&   videoCodecInst,
-        bool videoOnly = false) = 0;
-
     // Prepare for recording audio to stream.
     // FileCallback::RecordNotification(..) will be called after
     // notificationTimeMs of audio data has been recorded if
@@ -212,10 +172,6 @@
     // reading or writing.
     virtual int32_t codec_info(CodecInst& codecInst) const = 0;
 
-    // Update videoCodecInst according to the current video codec being used for
-    // reading or writing.
-    virtual int32_t VideoCodecInst(VideoCodec& videoCodecInst) const = 0;
-
 protected:
     MediaFile() {}
     virtual ~MediaFile() {}
diff --git a/webrtc/modules/media_file/media_file.gypi b/webrtc/modules/media_file/media_file.gypi
index 6ff96e1..4ec80c3 100644
--- a/webrtc/modules/media_file/media_file.gypi
+++ b/webrtc/modules/media_file/media_file.gypi
@@ -19,8 +19,6 @@
       'sources': [
         'interface/media_file.h',
         'interface/media_file_defines.h',
-        'source/avi_file.cc',
-        'source/avi_file.h',
         'source/media_file_impl.cc',
         'source/media_file_impl.h',
         'source/media_file_utility.cc',
diff --git a/webrtc/modules/media_file/source/avi_file.cc b/webrtc/modules/media_file/source/avi_file.cc
deleted file mode 100644
index 0d0fefd..0000000
--- a/webrtc/modules/media_file/source/avi_file.cc
+++ /dev/null
@@ -1,1747 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-// TODO(henrike): reassess the error handling in this class. Currently failure
-// is detected by asserts in many places. Also a refactoring of this class would
-// be beneficial.
-
-#include "webrtc/modules/media_file/source/avi_file.h"
-
-#include <assert.h>
-#include <string.h>
-
-#ifdef _WIN32
-#include <windows.h>
-#endif
-
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/file_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-// http://msdn2.microsoft.com/en-us/library/ms779636.aspx
-// A chunk has the following form:
-// ckID ckSize ckData
-// where ckID is a FOURCC that identifies the data contained in the
-// chunk, ckData is a 4-byte value giving the size of the data in
-// ckData, and ckData is zero or more bytes of data. The data is always
-// padded to nearest WORD boundary. ckSize gives the size of the valid
-// data in the chunk; it does not include the padding, the size of
-// ckID, or the size of ckSize.
-//http://msdn2.microsoft.com/en-us/library/ms779632.aspx
-//NOTE: Workaround to make MPEG4 files play on WMP. MPEG files
-//      place the config parameters efter the BITMAPINFOHEADER and
-//      *NOT* in the 'strd'!
-// http://msdn.microsoft.com/en-us/library/dd183375.aspx
-// http://msdn.microsoft.com/en-us/library/dd183376.aspx
-
-namespace webrtc {
-namespace {
-static const uint32_t kAvifHasindex       = 0x00000010;
-static const uint32_t kAvifMustuseindex   = 0x00000020;
-static const uint32_t kAvifIsinterleaved  = 0x00000100;
-static const uint32_t kAvifTrustcktype    = 0x00000800;
-static const uint32_t kAvifWascapturefile = 0x00010000;
-
-template <class T>
-T MinValue(T a, T b)
-{
-    return a < b ? a : b;
-}
-}  // namespace
-
-AviFile::AVIMAINHEADER::AVIMAINHEADER()
-    : fcc(                  0),
-      cb(                   0),
-      dwMicroSecPerFrame(   0),
-      dwMaxBytesPerSec(     0),
-      dwPaddingGranularity( 0),
-      dwFlags(              0),
-      dwTotalFrames(        0),
-      dwInitialFrames(      0),
-      dwStreams(            0),
-      dwSuggestedBufferSize(0),
-      dwWidth(              0),
-      dwHeight(             0)
-{
-    dwReserved[0] = 0;
-    dwReserved[1] = 0;
-    dwReserved[2] = 0;
-    dwReserved[3] = 0;
-}
-
-AVISTREAMHEADER::AVISTREAMHEADER()
-    : fcc(                  0),
-      cb(                   0),
-      fccType(              0),
-      fccHandler(           0),
-      dwFlags(              0),
-      wPriority(            0),
-      wLanguage(            0),
-      dwInitialFrames(      0),
-      dwScale(              0),
-      dwRate(               0),
-      dwStart(              0),
-      dwLength(             0),
-      dwSuggestedBufferSize(0),
-      dwQuality(            0),
-      dwSampleSize(         0)
-{
-    rcFrame.left   = 0;
-    rcFrame.top    = 0;
-    rcFrame.right  = 0;
-    rcFrame.bottom = 0;
-}
-
-BITMAPINFOHEADER::BITMAPINFOHEADER()
-    : biSize(         0),
-      biWidth(        0),
-      biHeight(       0),
-      biPlanes(       0),
-      biBitCount(     0),
-      biCompression(  0),
-      biSizeImage(    0),
-      biXPelsPerMeter(0),
-      biYPelsPerMeter(0),
-      biClrUsed(      0),
-      biClrImportant( 0)
-{
-}
-
-WAVEFORMATEX::WAVEFORMATEX()
-    : wFormatTag(     0),
-      nChannels(      0),
-      nSamplesPerSec( 0),
-      nAvgBytesPerSec(0),
-      nBlockAlign(    0),
-      wBitsPerSample( 0),
-      cbSize(         0)
-{
-}
-
-AviFile::AVIINDEXENTRY::AVIINDEXENTRY(uint32_t inckid,
-                                      uint32_t indwFlags,
-                                      uint32_t indwChunkOffset,
-                                      uint32_t indwChunkLength)
-    : ckid(inckid),
-      dwFlags(indwFlags),
-      dwChunkOffset(indwChunkOffset),
-      dwChunkLength(indwChunkLength)
-{
-}
-
-AviFile::AviFile()
-    : _crit(CriticalSectionWrapper::CreateCriticalSection()),
-      _aviFile(NULL),
-      _aviHeader(),
-      _videoStreamHeader(),
-      _audioStreamHeader(),
-      _videoFormatHeader(),
-      _audioFormatHeader(),
-      _videoConfigParameters(),
-      _videoConfigLength(0),
-      _videoStreamName(),
-      _audioConfigParameters(),
-      _audioStreamName(),
-      _videoStream(),
-      _audioStream(),
-      _nrStreams(0),
-      _aviLength(0),
-      _dataLength(0),
-      _bytesRead(0),
-      _dataStartByte(0),
-      _framesRead(0),
-      _videoFrames(0),
-      _audioFrames(0),
-      _reading(false),
-      _openedAs(AVI_AUDIO),
-      _loop(false),
-      _writing(false),
-      _bytesWritten(0),
-      _riffSizeMark(0),
-      _moviSizeMark(0),
-      _totNumFramesMark(0),
-      _videoStreamLengthMark(0),
-      _audioStreamLengthMark(0),
-      _moviListOffset(0),
-      _writeAudioStream(false),
-      _writeVideoStream(false),
-      _aviMode(NotSet),
-      _videoCodecConfigParams(NULL),
-      _videoCodecConfigParamsLength(0),
-      _videoStreamDataChunkPrefix(0),
-      _audioStreamDataChunkPrefix(0),
-      _created(false)
-{
-  ResetComplexMembers();
-}
-
-AviFile::~AviFile()
-{
-    Close();
-
-    delete[] _videoCodecConfigParams;
-    delete _crit;
-}
-
-int32_t AviFile::Open(AVIStreamType streamType, const char* fileName, bool loop)
-{
-    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,  "OpenAVIFile(%s)",
-                 fileName);
-    _crit->Enter();
-
-    if (_aviMode != NotSet)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    _aviMode = Read;
-
-    if (!fileName)
-    {
-        _crit->Leave();
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "\tfileName not valid!");
-        return -1;
-    }
-
-#ifdef _WIN32
-    // fopen does not support wide characters on Windows, ergo _wfopen.
-    wchar_t wideFileName[FileWrapper::kMaxFileNameSize];
-    wideFileName[0] = 0;
-    MultiByteToWideChar(CP_UTF8,0,fileName, -1, // convert the whole string
-                        wideFileName, FileWrapper::kMaxFileNameSize);
-
-    _aviFile = _wfopen(wideFileName, L"rb");
-#else
-    _aviFile = fopen(fileName, "rb");
-#endif
-
-    if (!_aviFile)
-    {
-        _crit->Leave();
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Could not open file!");
-        return -1;
-    }
-
-    // ReadRIFF verifies that the file is AVI and figures out the file length.
-    int32_t err = ReadRIFF();
-    if (err)
-    {
-        if (_aviFile)
-        {
-            fclose(_aviFile);
-            _aviFile = NULL;
-        }
-        _crit->Leave();
-        return -1;
-    }
-
-   err = ReadHeaders();
-    if (err)
-    {
-        if (_aviFile)
-        {
-            fclose(_aviFile);
-            _aviFile = NULL;
-        }
-        _crit->Leave();
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "Unsupported or corrupt AVI format");
-        return -1;
-    }
-
-    _dataStartByte = _bytesRead;
-    _reading = true;
-    _openedAs = streamType;
-    _loop = loop;
-    _crit->Leave();
-    return 0;
-}
-
-int32_t AviFile::Close()
-{
-    _crit->Enter();
-    switch (_aviMode)
-    {
-    case Read:
-        CloseRead();
-        break;
-    case Write:
-        CloseWrite();
-        break;
-    default:
-        break;
-    }
-
-    if (_videoCodecConfigParams)
-    {
-        delete [] _videoCodecConfigParams;
-        _videoCodecConfigParams = 0;
-    }
-    ResetMembers();
-    _crit->Leave();
-    return 0;
-}
-
-uint32_t AviFile::MakeFourCc(uint8_t ch0, uint8_t ch1, uint8_t ch2, uint8_t ch3)
-{
-    return ((uint32_t)(uint8_t)(ch0)         |
-            ((uint32_t)(uint8_t)(ch1) << 8)  |
-            ((uint32_t)(uint8_t)(ch2) << 16) |
-            ((uint32_t)(uint8_t)(ch3) << 24 ));
-}
-
-int32_t AviFile::GetVideoStreamInfo(AVISTREAMHEADER& videoStreamHeader,
-                                    BITMAPINFOHEADER& bitmapInfo,
-                                    char* codecConfigParameters,
-                                    int32_t& configLength)
-{
-    _crit->Enter();
-    if (!_reading && !_created)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    memcpy(&videoStreamHeader, &_videoStreamHeader, sizeof(_videoStreamHeader));
-    memcpy(&bitmapInfo, &_videoFormatHeader, sizeof(_videoFormatHeader));
-
-    if (configLength <= _videoConfigLength)
-    {
-        memcpy(codecConfigParameters, _videoConfigParameters,
-               _videoConfigLength);
-        configLength = _videoConfigLength;
-    }
-    else
-    {
-        configLength = 0;
-    }
-    _crit->Leave();
-    return 0;
-}
-
-int32_t AviFile::GetDuration(int32_t& durationMs)
-{
-    _crit->Enter();
-    if (_videoStreamHeader.dwRate==0 || _videoStreamHeader.dwScale==0)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    durationMs = _videoStreamHeader.dwLength * 1000 /
-        (_videoStreamHeader.dwRate/_videoStreamHeader.dwScale);
-    _crit->Leave();
-    return 0;
-}
-
-int32_t AviFile::GetAudioStreamInfo(WAVEFORMATEX& waveHeader)
-{
-    _crit->Enter();
-    if (_aviMode != Read)
-    {
-        _crit->Leave();
-        return -1;
-    }
-    if (!_reading && !_created)
-    {
-        _crit->Leave();
-        return -1;
-    }
-    memcpy(&waveHeader, &_audioFormatHeader, sizeof(_audioFormatHeader));
-    _crit->Leave();
-    return 0;
-}
-
-int32_t AviFile::WriteAudio(const uint8_t* data, size_t length)
-{
-    _crit->Enter();
-    size_t newBytesWritten = _bytesWritten;
-
-    if (_aviMode != Write)
-    {
-        _crit->Leave();
-        return -1;
-    }
-    if (!_created)
-    {
-        _crit->Leave();
-        return -1;
-    }
-    if (!_writeAudioStream)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    // Start of chunk.
-    const uint32_t chunkOffset = ftell(_aviFile) - _moviListOffset;
-    _bytesWritten += PutLE32(_audioStreamDataChunkPrefix);
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t chunkSizeMark = _bytesWritten;
-
-    _bytesWritten += PutBuffer(data, length);
-
-    const long chunkSize = PutLE32LengthFromCurrent(
-        static_cast<long>(chunkSizeMark));
-
-    // Make sure that the chunk is aligned on 2 bytes (= 1 sample).
-    if (chunkSize % 2)
-    {
-        _bytesWritten += PutByte(0);
-    }
-    // End of chunk
-
-    // Save chunk information for use when closing file.
-    AddChunkToIndexList(_audioStreamDataChunkPrefix, 0, // No flags.
-                        chunkOffset, chunkSize);
-
-    ++_audioFrames;
-    newBytesWritten = _bytesWritten - newBytesWritten;
-    _crit->Leave();
-    return static_cast<int32_t>(newBytesWritten);
-}
-
-int32_t AviFile::WriteVideo(const uint8_t* data, size_t length)
-{
-    _crit->Enter();
-    size_t newBytesWritten = _bytesWritten;
-    if (_aviMode != Write)
-    {
-        _crit->Leave();
-        return -1;
-    }
-    if (!_created)
-    {
-        _crit->Leave();
-        return -1;
-    }
-    if (!_writeVideoStream)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    // Start of chunk.
-    const uint32_t chunkOffset = ftell(_aviFile) - _moviListOffset;
-    _bytesWritten += PutLE32(_videoStreamDataChunkPrefix);
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t chunkSizeMark = _bytesWritten;
-
-    _bytesWritten += PutBuffer(data, length);
-
-    const long chunkSize = PutLE32LengthFromCurrent(
-        static_cast<long>(chunkSizeMark));
-
-    // Make sure that the chunk is aligned on 2 bytes (= 1 sample).
-    if (chunkSize % 2)
-    {
-        //Pad one byte, to WORD align.
-        _bytesWritten += PutByte(0);
-    }
-     //End chunk!
-    AddChunkToIndexList(_videoStreamDataChunkPrefix, 0, // No flags.
-                        chunkOffset, static_cast<uint32_t>(chunkSize));
-
-    ++_videoFrames;
-    newBytesWritten = _bytesWritten - newBytesWritten;
-    _crit->Leave();
-    return static_cast<int32_t>(newBytesWritten);
-}
-
-int32_t AviFile::PrepareDataChunkHeaders()
-{
-    // 00 video stream, 01 audio stream.
-    // db uncompresses video,  dc compressed video, wb WAV audio
-    if (_writeVideoStream)
-    {
-        if (strncmp((const char*) &_videoStreamHeader.fccHandler, "I420", 4) ==
-            0)
-        {
-            _videoStreamDataChunkPrefix = MakeFourCc('0', '0', 'd', 'b');
-        }
-        else
-        {
-            _videoStreamDataChunkPrefix = MakeFourCc('0', '0', 'd', 'c');
-        }
-        _audioStreamDataChunkPrefix = MakeFourCc('0', '1', 'w', 'b');
-    }
-    else
-    {
-        _audioStreamDataChunkPrefix = MakeFourCc('0', '0', 'w', 'b');
-    }
-    return 0;
-}
-
-int32_t AviFile::ReadMoviSubChunk(uint8_t* data, size_t& length, uint32_t tag1,
-                                  uint32_t tag2)
-{
-    if (!_reading)
-    {
-        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
-                     "AviFile::ReadMoviSubChunk(): File not open!");
-        length = 0;
-        return -1;
-    }
-
-    uint32_t size;
-    bool isEOFReached = false;
-    // Try to read one data chunk header
-    while (true)
-    {
-        // TODO (hellner): what happens if an empty AVI file is opened with
-        // _loop set to true? Seems like this while-loop would never exit!
-
-        // tag = db uncompresses video,  dc compressed video or wb WAV audio.
-        uint32_t tag;
-        _bytesRead += GetLE32(tag);
-        _bytesRead += GetLE32(size);
-
-        const int32_t eof = feof(_aviFile);
-        if (!eof)
-        {
-            if (tag == tag1)
-            {
-                // Supported tag found.
-                break;
-            }
-            else if ((tag == tag2) && (tag2 != 0))
-            {
-                // Supported tag found.
-                break;
-            }
-
-            // Jump to next chunk. The size is in bytes but chunks are aligned
-            // on 2 byte boundaries.
-            const uint32_t seekSize = (size % 2) ? size + 1 : size;
-            const int32_t err = fseek(_aviFile, seekSize, SEEK_CUR);
-
-            if (err)
-            {
-                isEOFReached = true;
-            }
-        }
-        else
-        {
-            isEOFReached = true;
-        }
-
-        if (isEOFReached)
-        {
-            clearerr(_aviFile);
-
-            if (_loop)
-            {
-                WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
-                             "AviFile::ReadMoviSubChunk(): Reached end of AVI\
-                              data file, starting from the beginning.");
-
-                fseek(_aviFile, static_cast<long>(_dataStartByte), SEEK_SET);
-
-                _bytesRead = _dataStartByte;
-                _framesRead = 0;
-                isEOFReached = false;
-            }
-            else
-            {
-                WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
-                             "AviFile::ReadMoviSubChunk(): Reached end of AVI\
-                             file!");
-                length = 0;
-                return -1;
-            }
-        }
-        _bytesRead += size;
-    }
-
-    if (size > length)
-    {
-        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
-                     "AviFile::ReadMoviSubChunk(): AVI read buffer too small!");
-
-        // Jump to next chunk. The size is in bytes but chunks are aligned
-        // on 2 byte boundaries.
-        const uint32_t seekSize = (size % 2) ? size + 1 : size;
-        fseek(_aviFile, seekSize, SEEK_CUR);
-        _bytesRead += seekSize;
-        length = 0;
-        return -1;
-    }
-    _bytesRead += GetBuffer(data, size);
-
-    // The size is in bytes but chunks are aligned on 2 byte boundaries.
-    if (size % 2)
-    {
-        uint8_t dummy_byte;
-        _bytesRead += GetByte(dummy_byte);
-    }
-    length = size;
-    ++_framesRead;
-    return 0;
-}
-
-int32_t AviFile::ReadAudio(uint8_t* data, size_t& length)
-{
-    _crit->Enter();
-    WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,  "AviFile::ReadAudio()");
-
-    if (_aviMode != Read)
-    {
-        _crit->Leave();
-        return -1;
-    }
-    if (_openedAs != AVI_AUDIO)
-    {
-        length = 0;
-        _crit->Leave();
-        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,  "File not open as audio!");
-        return -1;
-    }
-
-    const int32_t ret = ReadMoviSubChunk(
-        data,
-        length,
-        StreamAndTwoCharCodeToTag(_audioStream.streamNumber, "wb"));
-
-    _crit->Leave();
-    return ret;
-}
-
-int32_t AviFile::ReadVideo(uint8_t* data, size_t& length)
-{
-    WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "AviFile::ReadVideo()");
-
-    _crit->Enter();
-    if (_aviMode != Read)
-    {
-        //Has to be Read!
-        _crit->Leave();
-        return -1;
-    }
-    if (_openedAs != AVI_VIDEO)
-    {
-        length = 0;
-        _crit->Leave();
-        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "File not open as video!");
-        return -1;
-    }
-
-    const int32_t ret = ReadMoviSubChunk(
-        data,
-        length,
-        StreamAndTwoCharCodeToTag(_videoStream.streamNumber, "dc"),
-        StreamAndTwoCharCodeToTag(_videoStream.streamNumber, "db"));
-    _crit->Leave();
-    return ret;
-}
-
-int32_t AviFile::Create(const char* fileName)
-{
-    _crit->Enter();
-    if (_aviMode != Write)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    if (!_writeVideoStream && !_writeAudioStream)
-    {
-        _crit->Leave();
-        return -1;
-    }
-    if (_created)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-#ifdef _WIN32
-    // fopen does not support wide characters on Windows, ergo _wfopen.
-    wchar_t wideFileName[FileWrapper::kMaxFileNameSize];
-    wideFileName[0] = 0;
-
-    MultiByteToWideChar(CP_UTF8,0,fileName, -1, // convert the whole string
-                        wideFileName, FileWrapper::kMaxFileNameSize);
-
-    _aviFile = _wfopen(wideFileName, L"w+b");
-    if (!_aviFile)
-    {
-        _crit->Leave();
-        return -1;
-    }
-#else
-    _aviFile = fopen(fileName, "w+b");
-    if (!_aviFile)
-    {
-        _crit->Leave();
-        return -1;
-    }
-#endif
-
-    WriteRIFF();
-    WriteHeaders();
-
-    _created = true;
-
-    PrepareDataChunkHeaders();
-    ClearIndexList();
-    WriteMoviStart();
-    _aviMode = Write;
-    _crit->Leave();
-    return 0;
-}
-
-int32_t AviFile::CreateVideoStream(
-    const AVISTREAMHEADER& videoStreamHeader,
-    const BITMAPINFOHEADER& bitMapInfoHeader,
-    const uint8_t* codecConfigParams,
-    int32_t codecConfigParamsLength)
-{
-    _crit->Enter();
-    if (_aviMode == Read)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    if (_created)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    _aviMode = Write;
-    _writeVideoStream = true;
-
-    _videoStreamHeader = videoStreamHeader;
-    _videoFormatHeader = bitMapInfoHeader;
-
-    if (codecConfigParams && codecConfigParamsLength > 0)
-    {
-        if (_videoCodecConfigParams)
-        {
-            delete [] _videoCodecConfigParams;
-            _videoCodecConfigParams = 0;
-        }
-
-        _videoCodecConfigParams = new uint8_t[codecConfigParamsLength];
-        _videoCodecConfigParamsLength = codecConfigParamsLength;
-
-        memcpy(_videoCodecConfigParams, codecConfigParams,
-               _videoCodecConfigParamsLength);
-    }
-    _crit->Leave();
-    return 0;
-}
-
-int32_t AviFile::CreateAudioStream(
-    const AVISTREAMHEADER& audioStreamHeader,
-    const WAVEFORMATEX& waveFormatHeader)
-{
-    _crit->Enter();
-
-    if (_aviMode == Read)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    if (_created)
-    {
-        _crit->Leave();
-        return -1;
-    }
-
-    _aviMode = Write;
-    _writeAudioStream = true;
-    _audioStreamHeader = audioStreamHeader;
-    _audioFormatHeader = waveFormatHeader;
-    _crit->Leave();
-    return 0;
-}
-
-int32_t AviFile::WriteRIFF()
-{
-    const uint32_t riffTag = MakeFourCc('R', 'I', 'F', 'F');
-    _bytesWritten += PutLE32(riffTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    _riffSizeMark = _bytesWritten;
-
-    const uint32_t aviTag = MakeFourCc('A', 'V', 'I', ' ');
-    _bytesWritten += PutLE32(aviTag);
-
-    return 0;
-}
-
-
-int32_t AviFile::WriteHeaders()
-{
-    // Main AVI header list.
-    const uint32_t listTag = MakeFourCc('L', 'I', 'S', 'T');
-    _bytesWritten += PutLE32(listTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t listhdrlSizeMark = _bytesWritten;
-
-    const uint32_t hdrlTag = MakeFourCc('h', 'd', 'r', 'l');
-    _bytesWritten += PutLE32(hdrlTag);
-
-    WriteAVIMainHeader();
-    WriteAVIStreamHeaders();
-
-    const long hdrlLen = PutLE32LengthFromCurrent(
-        static_cast<long>(listhdrlSizeMark));
-
-    // Junk chunk to align on 2048 boundry (CD-ROM sector boundary).
-    const uint32_t junkTag = MakeFourCc('J', 'U', 'N', 'K');
-    _bytesWritten += PutLE32(junkTag);
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t junkSizeMark = _bytesWritten;
-
-    const uint32_t junkBufferSize =
-        0x800     // 2048 byte alignment
-        - 12      // RIFF SIZE 'AVI '
-        - 8       // LIST SIZE
-        - hdrlLen //
-        - 8       // JUNK SIZE
-        - 12;     // LIST SIZE 'MOVI'
-
-    // TODO (hellner): why not just fseek here?
-    uint8_t* junkBuffer = new uint8_t[junkBufferSize];
-    memset(junkBuffer, 0, junkBufferSize);
-    _bytesWritten += PutBuffer(junkBuffer, junkBufferSize);
-    delete [] junkBuffer;
-
-    PutLE32LengthFromCurrent(static_cast<long>(junkSizeMark));
-    // End of JUNK chunk.
-    // End of main AVI header list.
-    return 0;
-}
-
-int32_t AviFile::WriteAVIMainHeader()
-{
-    const uint32_t avihTag = MakeFourCc('a', 'v', 'i', 'h');
-    _bytesWritten += PutLE32(avihTag);
-    _bytesWritten += PutLE32(14 * sizeof(uint32_t));
-
-    const uint32_t scale = _videoStreamHeader.dwScale ?
-        _videoStreamHeader.dwScale : 1;
-    const uint32_t microSecPerFrame = 1000000 /
-        (_videoStreamHeader.dwRate / scale);
-    _bytesWritten += PutLE32(microSecPerFrame);
-    _bytesWritten += PutLE32(0);
-    _bytesWritten += PutLE32(0);
-
-    uint32_t numStreams = 0;
-    if (_writeVideoStream)
-    {
-        ++numStreams;
-    }
-    if (_writeAudioStream)
-    {
-        ++numStreams;
-    }
-
-    if (numStreams == 1)
-    {
-        _bytesWritten += PutLE32(
-            kAvifTrustcktype
-            | kAvifHasindex
-            | kAvifWascapturefile);
-    }
-    else
-    {
-        _bytesWritten += PutLE32(
-            kAvifTrustcktype
-            | kAvifHasindex
-            | kAvifWascapturefile
-            | kAvifIsinterleaved);
-    }
-
-    _totNumFramesMark = _bytesWritten;
-    _bytesWritten += PutLE32(0);
-    _bytesWritten += PutLE32(0);
-    _bytesWritten += PutLE32(numStreams);
-
-    if (_writeVideoStream)
-    {
-        _bytesWritten += PutLE32(
-            _videoStreamHeader.dwSuggestedBufferSize);
-        _bytesWritten += PutLE32(
-            _videoStreamHeader.rcFrame.right-_videoStreamHeader.rcFrame.left);
-        _bytesWritten += PutLE32(
-            _videoStreamHeader.rcFrame.bottom-_videoStreamHeader.rcFrame.top);
-    } else {
-        _bytesWritten += PutLE32(0);
-        _bytesWritten += PutLE32(0);
-        _bytesWritten += PutLE32(0);
-    }
-    _bytesWritten += PutLE32(0);
-    _bytesWritten += PutLE32(0);
-    _bytesWritten += PutLE32(0);
-    _bytesWritten += PutLE32(0);
-    return 0;
-}
-
-int32_t AviFile::WriteAVIStreamHeaders()
-{
-    if (_writeVideoStream)
-    {
-        WriteAVIVideoStreamHeaders();
-    }
-    if (_writeAudioStream)
-    {
-        WriteAVIAudioStreamHeaders();
-    }
-    return 0;
-}
-
-int32_t AviFile::WriteAVIVideoStreamHeaders()
-{
-    const uint32_t listTag = MakeFourCc('L', 'I', 'S', 'T');
-    _bytesWritten += PutLE32(listTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t liststrlSizeMark = _bytesWritten;
-
-    const uint32_t hdrlTag = MakeFourCc('s', 't', 'r', 'l');
-    _bytesWritten += PutLE32(hdrlTag);
-
-    WriteAVIVideoStreamHeaderChunks();
-
-    PutLE32LengthFromCurrent(static_cast<long>(liststrlSizeMark));
-
-    return 0;
-}
-
-int32_t AviFile::WriteAVIVideoStreamHeaderChunks()
-{
-    // Start of strh
-    const uint32_t strhTag = MakeFourCc('s', 't', 'r', 'h');
-    _bytesWritten += PutLE32(strhTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t strhSizeMark = _bytesWritten;
-
-    _bytesWritten += PutLE32(_videoStreamHeader.fccType);
-    _bytesWritten += PutLE32(_videoStreamHeader.fccHandler);
-    _bytesWritten += PutLE32(_videoStreamHeader.dwFlags);
-    _bytesWritten += PutLE16(_videoStreamHeader.wPriority);
-    _bytesWritten += PutLE16(_videoStreamHeader.wLanguage);
-    _bytesWritten += PutLE32(_videoStreamHeader.dwInitialFrames);
-    _bytesWritten += PutLE32(_videoStreamHeader.dwScale);
-    _bytesWritten += PutLE32(_videoStreamHeader.dwRate);
-    _bytesWritten += PutLE32(_videoStreamHeader.dwStart);
-
-    _videoStreamLengthMark = _bytesWritten;
-    _bytesWritten += PutLE32(_videoStreamHeader.dwLength);
-
-    _bytesWritten += PutLE32(_videoStreamHeader.dwSuggestedBufferSize);
-    _bytesWritten += PutLE32(_videoStreamHeader.dwQuality);
-    _bytesWritten += PutLE32(_videoStreamHeader.dwSampleSize);
-    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.left);
-    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.top);
-    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.right);
-    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.bottom);
-
-    PutLE32LengthFromCurrent(static_cast<long>(strhSizeMark));
-    // End of strh
-
-    // Start of strf
-    const uint32_t strfTag = MakeFourCc('s', 't', 'r', 'f');
-    _bytesWritten += PutLE32(strfTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t strfSizeMark = _bytesWritten;
-
-    _bytesWritten += PutLE32(_videoFormatHeader.biSize);
-    _bytesWritten += PutLE32(_videoFormatHeader.biWidth);
-    _bytesWritten += PutLE32(_videoFormatHeader.biHeight);
-    _bytesWritten += PutLE16(_videoFormatHeader.biPlanes);
-    _bytesWritten += PutLE16(_videoFormatHeader.biBitCount);
-    _bytesWritten += PutLE32(_videoFormatHeader.biCompression);
-    _bytesWritten += PutLE32(_videoFormatHeader.biSizeImage);
-    _bytesWritten += PutLE32(_videoFormatHeader.biXPelsPerMeter);
-    _bytesWritten += PutLE32(_videoFormatHeader.biYPelsPerMeter);
-    _bytesWritten += PutLE32(_videoFormatHeader.biClrUsed);
-    _bytesWritten += PutLE32(_videoFormatHeader.biClrImportant);
-
-    const bool isMpegFile = _videoStreamHeader.fccHandler ==
-        AviFile::MakeFourCc('M','4','S','2');
-    if (isMpegFile)
-    {
-        if (_videoCodecConfigParams && _videoCodecConfigParamsLength > 0)
-        {
-            _bytesWritten += PutBuffer(_videoCodecConfigParams,
-                                       _videoCodecConfigParamsLength);
-        }
-    }
-
-    PutLE32LengthFromCurrent(static_cast<long>(strfSizeMark));
-    // End of strf
-
-    if ( _videoCodecConfigParams
-         && (_videoCodecConfigParamsLength > 0)
-         && !isMpegFile)
-    {
-        // Write strd, unless it's an MPEG file
-        const uint32_t strdTag = MakeFourCc('s', 't', 'r', 'd');
-        _bytesWritten += PutLE32(strdTag);
-
-        // Size is unknown at this point. Update later.
-        _bytesWritten += PutLE32(0);
-        const size_t strdSizeMark = _bytesWritten;
-
-        _bytesWritten += PutBuffer(_videoCodecConfigParams,
-                                   _videoCodecConfigParamsLength);
-
-        PutLE32LengthFromCurrent(static_cast<long>(strdSizeMark));
-        // End of strd
-    }
-
-    // Start of strn
-    const uint32_t strnTag = MakeFourCc('s', 't', 'r', 'n');
-    _bytesWritten += PutLE32(strnTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t strnSizeMark = _bytesWritten;
-
-    _bytesWritten += PutBufferZ("WebRtc.avi ");
-
-    PutLE32LengthFromCurrent(static_cast<long>(strnSizeMark));
-    // End of strd
-
-    return 0;
-}
-
-int32_t AviFile::WriteAVIAudioStreamHeaders()
-{
-    // Start of LIST
-    uint32_t listTag = MakeFourCc('L', 'I', 'S', 'T');
-    _bytesWritten += PutLE32(listTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t liststrlSizeMark = _bytesWritten;
-
-    uint32_t hdrlTag = MakeFourCc('s', 't', 'r', 'l');
-    _bytesWritten += PutLE32(hdrlTag);
-
-    WriteAVIAudioStreamHeaderChunks();
-
-    PutLE32LengthFromCurrent(static_cast<long>(liststrlSizeMark));
-    //End of LIST
-    return 0;
-}
-
-int32_t AviFile::WriteAVIAudioStreamHeaderChunks()
-{
-    // Start of strh
-    const uint32_t strhTag = MakeFourCc('s', 't', 'r', 'h');
-    _bytesWritten += PutLE32(strhTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t strhSizeMark = _bytesWritten;
-
-    _bytesWritten += PutLE32(_audioStreamHeader.fccType);
-    _bytesWritten += PutLE32(_audioStreamHeader.fccHandler);
-    _bytesWritten += PutLE32(_audioStreamHeader.dwFlags);
-    _bytesWritten += PutLE16(_audioStreamHeader.wPriority);
-    _bytesWritten += PutLE16(_audioStreamHeader.wLanguage);
-    _bytesWritten += PutLE32(_audioStreamHeader.dwInitialFrames);
-    _bytesWritten += PutLE32(_audioStreamHeader.dwScale);
-    _bytesWritten += PutLE32(_audioStreamHeader.dwRate);
-    _bytesWritten += PutLE32(_audioStreamHeader.dwStart);
-
-    _audioStreamLengthMark = _bytesWritten;
-    _bytesWritten += PutLE32(_audioStreamHeader.dwLength);
-
-    _bytesWritten += PutLE32(_audioStreamHeader.dwSuggestedBufferSize);
-    _bytesWritten += PutLE32(_audioStreamHeader.dwQuality);
-    _bytesWritten += PutLE32(_audioStreamHeader.dwSampleSize);
-    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.left);
-    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.top);
-    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.right);
-    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.bottom);
-
-    PutLE32LengthFromCurrent(static_cast<long>(strhSizeMark));
-    // End of strh
-
-    // Start of strf
-    const uint32_t strfTag = MakeFourCc('s', 't', 'r', 'f');
-    _bytesWritten += PutLE32(strfTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t strfSizeMark = _bytesWritten;
-
-    _bytesWritten += PutLE16(_audioFormatHeader.wFormatTag);
-    _bytesWritten += PutLE16(_audioFormatHeader.nChannels);
-    _bytesWritten += PutLE32(_audioFormatHeader.nSamplesPerSec);
-    _bytesWritten += PutLE32(_audioFormatHeader.nAvgBytesPerSec);
-    _bytesWritten += PutLE16(_audioFormatHeader.nBlockAlign);
-    _bytesWritten += PutLE16(_audioFormatHeader.wBitsPerSample);
-    _bytesWritten += PutLE16(_audioFormatHeader.cbSize);
-
-    PutLE32LengthFromCurrent(static_cast<long>(strfSizeMark));
-    // End end of strf.
-
-    // Audio doesn't have strd.
-
-    // Start of strn
-    const uint32_t strnTag = MakeFourCc('s', 't', 'r', 'n');
-    _bytesWritten += PutLE32(strnTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t strnSizeMark = _bytesWritten;
-
-    _bytesWritten += PutBufferZ("WebRtc.avi ");
-
-    PutLE32LengthFromCurrent(static_cast<long>(strnSizeMark));
-    // End of strd.
-
-    return 0;
-}
-
-int32_t AviFile::WriteMoviStart()
-{
-    // Create template movi list. Fill out size when known (i.e. when closing
-    // file).
-    const uint32_t listTag = MakeFourCc('L', 'I', 'S', 'T');
-    _bytesWritten += PutLE32(listTag);
-
-    _bytesWritten += PutLE32(0); //Size! Change later!
-    _moviSizeMark = _bytesWritten;
-    _moviListOffset = ftell(_aviFile);
-
-    const uint32_t moviTag = MakeFourCc('m', 'o', 'v', 'i');
-    _bytesWritten += PutLE32(moviTag);
-
-    return 0;
-}
-
-size_t AviFile::PutByte(uint8_t byte)
-{
-    return fwrite(&byte, sizeof(uint8_t), sizeof(uint8_t),
-                  _aviFile);
-}
-
-size_t AviFile::PutLE16(uint16_t word)
-{
-    return fwrite(&word, sizeof(uint8_t), sizeof(uint16_t),
-                  _aviFile);
-}
-
-size_t AviFile::PutLE32(uint32_t word)
-{
-    return fwrite(&word, sizeof(uint8_t), sizeof(uint32_t),
-                  _aviFile);
-}
-
-size_t AviFile::PutBuffer(const uint8_t* str, size_t size)
-{
-    return fwrite(str, sizeof(uint8_t), size,
-                  _aviFile);
-}
-
-size_t AviFile::PutBufferZ(const char* str)
-{
-    // Include NULL charachter, hence the + 1
-    return PutBuffer(reinterpret_cast<const uint8_t*>(str),
-                     strlen(str) + 1);
-}
-
-long AviFile::PutLE32LengthFromCurrent(long startPos)
-{
-    const long endPos = ftell(_aviFile);
-    if (endPos < 0) {
-        return 0;
-    }
-    bool success = (0 == fseek(_aviFile, startPos - 4, SEEK_SET));
-    if (!success) {
-        assert(false);
-        return 0;
-    }
-    const long len = endPos - startPos;
-    if (endPos > startPos) {
-        PutLE32(len);
-    }
-    else {
-        assert(false);
-    }
-    success = (0 == fseek(_aviFile, endPos, SEEK_SET));
-    assert(success);
-    return len;
-}
-
-void AviFile::PutLE32AtPos(long pos, uint32_t word)
-{
-    const long currPos = ftell(_aviFile);
-    if (currPos < 0) {
-        assert(false);
-        return;
-    }
-    bool success = (0 == fseek(_aviFile, pos, SEEK_SET));
-    if (!success) {
-      assert(false);
-      return;
-    }
-    PutLE32(word);
-    success = (0 == fseek(_aviFile, currPos, SEEK_SET));
-    assert(success);
-}
-
-void AviFile::CloseRead()
-{
-    if (_aviFile)
-    {
-        fclose(_aviFile);
-        _aviFile = NULL;
-    }
-}
-
-void AviFile::CloseWrite()
-{
-    if (_created)
-    {
-        // Update everything that isn't known until the file is closed. The
-        // marks indicate where in the headers this update should be.
-        PutLE32LengthFromCurrent(static_cast<long>(_moviSizeMark));
-
-        PutLE32AtPos(static_cast<long>(_totNumFramesMark), _videoFrames);
-
-        if (_writeVideoStream)
-        {
-            PutLE32AtPos(static_cast<long>(_videoStreamLengthMark),
-                         _videoFrames);
-        }
-
-        if (_writeAudioStream)
-        {
-            PutLE32AtPos(static_cast<long>(_audioStreamLengthMark),
-                         _audioFrames);
-        }
-
-        WriteIndex();
-        PutLE32LengthFromCurrent(static_cast<long>(_riffSizeMark));
-        ClearIndexList();
-
-        if (_aviFile)
-        {
-            fclose(_aviFile);
-            _aviFile = NULL;
-        }
-    }
-}
-
-void AviFile::ResetMembers()
-{
-    ResetComplexMembers();
-
-    _aviFile = NULL;
-
-    _nrStreams     = 0;
-    _aviLength     = 0;
-    _dataLength    = 0;
-    _bytesRead     = 0;
-    _dataStartByte = 0;
-    _framesRead    = 0;
-    _videoFrames   = 0;
-    _audioFrames   = 0;
-
-    _reading = false;
-    _openedAs = AVI_AUDIO;
-    _loop = false;
-    _writing = false;
-
-    _bytesWritten          = 0;
-
-    _riffSizeMark          = 0;
-    _moviSizeMark          = 0;
-    _totNumFramesMark      = 0;
-    _videoStreamLengthMark = 0;
-    _audioStreamLengthMark = 0;
-
-    _writeAudioStream = false;
-    _writeVideoStream = false;
-
-    _aviMode                      = NotSet;
-    _videoCodecConfigParams       = 0;
-    _videoCodecConfigParamsLength = 0;
-
-    _videoStreamDataChunkPrefix = 0;
-    _audioStreamDataChunkPrefix = 0;
-
-    _created = false;
-
-    _moviListOffset = 0;
-
-    _videoConfigLength = 0;
-}
-
-void AviFile::ResetComplexMembers()
-{
-    memset(&_aviHeader, 0, sizeof(AVIMAINHEADER));
-    memset(&_videoStreamHeader, 0, sizeof(AVISTREAMHEADER));
-    memset(&_audioStreamHeader, 0, sizeof(AVISTREAMHEADER));
-    memset(&_videoFormatHeader, 0, sizeof(BITMAPINFOHEADER));
-    memset(&_audioFormatHeader, 0, sizeof(WAVEFORMATEX));
-    memset(_videoConfigParameters, 0, CODEC_CONFIG_LENGTH);
-    memset(_videoStreamName, 0, STREAM_NAME_LENGTH);
-    memset(_audioStreamName, 0, STREAM_NAME_LENGTH);
-    memset(&_videoStream, 0, sizeof(AVIStream));
-    memset(&_audioStream, 0, sizeof(AVIStream));
-}
-
-size_t AviFile::GetByte(uint8_t& word)
-{
-    return fread(&word, sizeof(uint8_t), sizeof(uint8_t), _aviFile);
-}
-
-size_t AviFile::GetLE16(uint16_t& word)
-{
-    return fread(&word, sizeof(uint8_t), sizeof(uint16_t),
-                 _aviFile);
-}
-
-size_t AviFile::GetLE32(uint32_t& word)
-{
-    return fread(&word, sizeof(uint8_t), sizeof(uint32_t),
-                 _aviFile);
-}
-
-size_t AviFile::GetBuffer(uint8_t* str, size_t size)
-{
-    return fread(str, sizeof(uint8_t), size, _aviFile);
-}
-
-int32_t AviFile::ReadRIFF()
-{
-    uint32_t tag;
-    _bytesRead = GetLE32(tag);
-    if (tag != MakeFourCc('R', 'I', 'F', 'F'))
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Not a RIFF file!");
-        return -1;
-    }
-
-    uint32_t size;
-    _bytesRead += GetLE32(size);
-    _aviLength = size;
-
-    _bytesRead += GetLE32(tag);
-    if (tag != MakeFourCc('A', 'V', 'I', ' '))
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Not an AVI file!");
-        return -1;
-    }
-
-    return 0;
-}
-
-int32_t AviFile::ReadHeaders()
-{
-    uint32_t tag;
-    _bytesRead += GetLE32(tag);
-    uint32_t size;
-    _bytesRead += GetLE32(size);
-
-    if (tag != MakeFourCc('L', 'I', 'S', 'T'))
-    {
-        return -1;
-    }
-
-    uint32_t listTag;
-    _bytesRead += GetLE32(listTag);
-    if (listTag != MakeFourCc('h', 'd', 'r', 'l'))
-    {
-        return -1;
-    }
-
-    int32_t err = ReadAVIMainHeader();
-    if (err)
-    {
-        return -1;
-    }
-
-    return 0;
-}
-
-int32_t AviFile::ReadAVIMainHeader()
-{
-    _bytesRead += GetLE32(_aviHeader.fcc);
-    _bytesRead += GetLE32(_aviHeader.cb);
-    _bytesRead += GetLE32(_aviHeader.dwMicroSecPerFrame);
-    _bytesRead += GetLE32(_aviHeader.dwMaxBytesPerSec);
-    _bytesRead += GetLE32(_aviHeader.dwPaddingGranularity);
-    _bytesRead += GetLE32(_aviHeader.dwFlags);
-    _bytesRead += GetLE32(_aviHeader.dwTotalFrames);
-    _bytesRead += GetLE32(_aviHeader.dwInitialFrames);
-    _bytesRead += GetLE32(_aviHeader.dwStreams);
-    _bytesRead += GetLE32(_aviHeader.dwSuggestedBufferSize);
-    _bytesRead += GetLE32(_aviHeader.dwWidth);
-    _bytesRead += GetLE32(_aviHeader.dwHeight);
-    _bytesRead += GetLE32(_aviHeader.dwReserved[0]);
-    _bytesRead += GetLE32(_aviHeader.dwReserved[1]);
-    _bytesRead += GetLE32(_aviHeader.dwReserved[2]);
-    _bytesRead += GetLE32(_aviHeader.dwReserved[3]);
-
-    if (_aviHeader.fcc != MakeFourCc('a', 'v', 'i', 'h'))
-    {
-        return -1;
-    }
-
-    if (_aviHeader.dwFlags & kAvifMustuseindex)
-    {
-        return -1;
-    }
-
-    bool readVideoStreamHeader = false;
-    bool readAudioStreamHeader = false;
-    unsigned int streamsRead = 0;
-    while (_aviHeader.dwStreams > streamsRead)
-    {
-        uint32_t strltag;
-        _bytesRead += GetLE32(strltag);
-        uint32_t strlsize;
-        _bytesRead += GetLE32(strlsize);
-        const long endSeekPos = ftell(_aviFile) +
-            static_cast<int32_t>(strlsize);
-
-        if (strltag != MakeFourCc('L', 'I', 'S', 'T'))
-        {
-            return -1;
-        }
-
-        uint32_t listTag;
-        _bytesRead += GetLE32(listTag);
-        if (listTag != MakeFourCc('s', 't', 'r', 'l'))
-        {
-            return -1;
-        }
-
-        uint32_t chunktag;
-        _bytesRead += GetLE32(chunktag);
-        uint32_t chunksize;
-        _bytesRead += GetLE32(chunksize);
-
-        if (chunktag != MakeFourCc('s', 't', 'r', 'h'))
-        {
-            return -1;
-        }
-
-        AVISTREAMHEADER tmpStreamHeader;
-        tmpStreamHeader.fcc = chunktag;
-        tmpStreamHeader.cb  = chunksize;
-
-        _bytesRead += GetLE32(tmpStreamHeader.fccType);
-        _bytesRead += GetLE32(tmpStreamHeader.fccHandler);
-        _bytesRead += GetLE32(tmpStreamHeader.dwFlags);
-        _bytesRead += GetLE16(tmpStreamHeader.wPriority);
-        _bytesRead += GetLE16(tmpStreamHeader.wLanguage);
-        _bytesRead += GetLE32(tmpStreamHeader.dwInitialFrames);
-        _bytesRead += GetLE32(tmpStreamHeader.dwScale);
-        _bytesRead += GetLE32(tmpStreamHeader.dwRate);
-        _bytesRead += GetLE32(tmpStreamHeader.dwStart);
-        _bytesRead += GetLE32(tmpStreamHeader.dwLength);
-        _bytesRead += GetLE32(tmpStreamHeader.dwSuggestedBufferSize);
-        _bytesRead += GetLE32(tmpStreamHeader.dwQuality);
-        _bytesRead += GetLE32(tmpStreamHeader.dwSampleSize);
-
-        uint16_t left;
-        _bytesRead += GetLE16(left);
-        tmpStreamHeader.rcFrame.left = left;
-        uint16_t top;
-        _bytesRead += GetLE16(top);
-        tmpStreamHeader.rcFrame.top = top;
-        uint16_t right;
-        _bytesRead += GetLE16(right);
-        tmpStreamHeader.rcFrame.right = right;
-        uint16_t bottom;
-        _bytesRead += GetLE16(bottom);
-        tmpStreamHeader.rcFrame.bottom = bottom;
-
-        if (!readVideoStreamHeader
-            && (tmpStreamHeader.fccType == MakeFourCc('v', 'i', 'd', 's')))
-        {
-            _videoStreamHeader = tmpStreamHeader; //Bitwise copy is OK!
-            const int32_t err = ReadAVIVideoStreamHeader(endSeekPos);
-            if (err)
-            {
-                return -1;
-            }
-            // Make sure there actually is video data in the file...
-            if (_videoStreamHeader.dwLength == 0)
-            {
-                return -1;
-            }
-            readVideoStreamHeader = true;
-        } else if(!readAudioStreamHeader &&
-                  (tmpStreamHeader.fccType == MakeFourCc('a', 'u', 'd', 's'))) {
-            _audioStreamHeader = tmpStreamHeader;
-            const int32_t err = ReadAVIAudioStreamHeader(endSeekPos);
-            if (err)
-            {
-                return -1;
-            }
-            readAudioStreamHeader = true;
-        }
-        else
-        {
-            fseek(_aviFile, endSeekPos, SEEK_SET);
-            _bytesRead += endSeekPos;
-        }
-
-        ++streamsRead;
-    }
-
-    if (!readVideoStreamHeader && !readAudioStreamHeader)
-    {
-        return -1;
-    }
-
-    uint32_t tag;
-    _bytesRead += GetLE32(tag);
-    uint32_t size;
-    _bytesRead += GetLE32(size);
-
-    if (tag == MakeFourCc('J', 'U', 'N', 'K'))
-    {
-        fseek(_aviFile, size, SEEK_CUR);
-        _bytesRead += size;
-        _bytesRead += GetLE32(tag);
-        _bytesRead += GetLE32(size);
-    }
-    if (tag != MakeFourCc('L', 'I', 'S', 'T'))
-    {
-        return -1;
-    }
-    uint32_t listTag;
-    _bytesRead += GetLE32(listTag);
-    if (listTag != MakeFourCc('m', 'o', 'v', 'i'))
-    {
-        return -1;
-    }
-    _dataLength = size;
-    return 0;
-}
-
-int32_t AviFile::ReadAVIVideoStreamHeader(int32_t endpos)
-{
-    uint32_t chunktag;
-    _bytesRead += GetLE32(chunktag);
-    uint32_t chunksize;
-    _bytesRead += GetLE32(chunksize);
-
-    if (chunktag != MakeFourCc('s', 't', 'r', 'f'))
-    {
-        return -1;
-    }
-
-    _bytesRead += GetLE32(_videoFormatHeader.biSize);
-    _bytesRead += GetLE32(_videoFormatHeader.biWidth);
-    _bytesRead += GetLE32(_videoFormatHeader.biHeight);
-    _bytesRead += GetLE16(_videoFormatHeader.biPlanes);
-    _bytesRead += GetLE16(_videoFormatHeader.biBitCount);
-    _bytesRead += GetLE32(_videoFormatHeader.biCompression);
-    _bytesRead += GetLE32(_videoFormatHeader.biSizeImage);
-    _bytesRead += GetLE32(_videoFormatHeader.biXPelsPerMeter);
-    _bytesRead += GetLE32(_videoFormatHeader.biYPelsPerMeter);
-    _bytesRead += GetLE32(_videoFormatHeader.biClrUsed);
-    _bytesRead += GetLE32(_videoFormatHeader.biClrImportant);
-
-    if (chunksize >  _videoFormatHeader.biSize)
-    {
-        const uint32_t size = chunksize - _videoFormatHeader.biSize;
-        const uint32_t readSize = MinValue(size, CODEC_CONFIG_LENGTH);
-        _bytesRead += GetBuffer(
-            reinterpret_cast<uint8_t*>(_videoConfigParameters), readSize);
-        _videoConfigLength = readSize;
-        int32_t skipSize = chunksize - _videoFormatHeader.biSize -
-            readSize;
-        if (skipSize > 0)
-        {
-            fseek(_aviFile, skipSize, SEEK_CUR);
-            _bytesRead += skipSize;
-        }
-    }
-
-    while (static_cast<long>(_bytesRead) < endpos)
-    {
-        uint32_t chunktag;
-        _bytesRead += GetLE32(chunktag);
-        uint32_t chunksize;
-        _bytesRead += GetLE32(chunksize);
-
-        if (chunktag == MakeFourCc('s', 't', 'r', 'n'))
-        {
-            const uint32_t size = MinValue(chunksize, STREAM_NAME_LENGTH);
-            _bytesRead += GetBuffer(
-                reinterpret_cast<uint8_t*>(_videoStreamName), size);
-        }
-        else if (chunktag == MakeFourCc('s', 't', 'r', 'd'))
-        {
-            const uint32_t size = MinValue(chunksize, CODEC_CONFIG_LENGTH);
-            _bytesRead += GetBuffer(
-                reinterpret_cast<uint8_t*>(_videoConfigParameters), size);
-            _videoConfigLength = size;
-        }
-        else
-        {
-            fseek(_aviFile, chunksize, SEEK_CUR);
-            _bytesRead += chunksize;
-        }
-
-        if (feof(_aviFile))
-        {
-            return -1;
-        }
-    }
-    _videoStream.streamType = AviFile::AVI_VIDEO;
-    _videoStream.streamNumber = _nrStreams++;
-
-    return 0;
-}
-
-int32_t AviFile::ReadAVIAudioStreamHeader(int32_t endpos)
-{
-    uint32_t chunktag;
-    _bytesRead += GetLE32(chunktag);
-    uint32_t chunksize;
-    _bytesRead += GetLE32(chunksize);
-
-    if (chunktag != MakeFourCc('s', 't', 'r', 'f'))
-    {
-        return -1;
-    }
-
-    const size_t startRead = _bytesRead;
-    _bytesRead += GetLE16(_audioFormatHeader.wFormatTag);
-    _bytesRead += GetLE16(_audioFormatHeader.nChannels);
-    _bytesRead += GetLE32(_audioFormatHeader.nSamplesPerSec);
-    _bytesRead += GetLE32(_audioFormatHeader.nAvgBytesPerSec);
-    _bytesRead += GetLE16(_audioFormatHeader.nBlockAlign);
-    _bytesRead += GetLE16(_audioFormatHeader.wBitsPerSample);
-    if (chunksize > 0x10) {
-        _bytesRead += GetLE16(_audioFormatHeader.cbSize);
-    }
-
-    const uint32_t diffRead = chunksize - (_bytesRead - startRead);
-    if (diffRead > 0)
-    {
-        const uint32_t size = MinValue(diffRead, CODEC_CONFIG_LENGTH);
-        _bytesRead += GetBuffer(
-            reinterpret_cast<uint8_t*>(_audioConfigParameters), size);
-    }
-
-    while (static_cast<long>(_bytesRead) < endpos)
-    {
-        uint32_t chunktag;
-        _bytesRead += GetLE32(chunktag);
-        uint32_t chunksize;
-        _bytesRead += GetLE32(chunksize);
-
-        if (chunktag == MakeFourCc('s', 't', 'r', 'n'))
-        {
-            const uint32_t size = MinValue(chunksize, STREAM_NAME_LENGTH);
-            _bytesRead += GetBuffer(
-                reinterpret_cast<uint8_t*>(_audioStreamName), size);
-        }
-        else if (chunktag == MakeFourCc('s', 't', 'r', 'd'))
-        {
-            const uint32_t size = MinValue(chunksize, CODEC_CONFIG_LENGTH);
-            _bytesRead += GetBuffer(
-                reinterpret_cast<uint8_t*>(_audioConfigParameters), size);
-        }
-        else
-        {
-            fseek(_aviFile, chunksize, SEEK_CUR);
-            _bytesRead += chunksize;
-        }
-
-        if (feof(_aviFile))
-        {
-            return -1;
-        }
-    }
-    _audioStream.streamType = AviFile::AVI_AUDIO;
-    _audioStream.streamNumber = _nrStreams++;
-    return 0;
-}
-
-uint32_t AviFile::StreamAndTwoCharCodeToTag(int32_t streamNum,
-                                            const char* twoCharCode)
-{
-    uint8_t a = '0';
-    uint8_t b;
-    switch (streamNum)
-    {
-    case 1:
-        b = '1';
-        break;
-    case 2:
-        b = '2';
-        break;
-    default:
-        b = '0';
-    }
-    return MakeFourCc(a, b, twoCharCode[0], twoCharCode[1]);
-}
-
-void AviFile::ClearIndexList()
-{
-  for (IndexList::iterator iter = _indexList.begin();
-       iter != _indexList.end(); ++iter) {
-      delete *iter;
-  }
-  _indexList.clear();
-}
-
-void AviFile::AddChunkToIndexList(uint32_t inChunkId,
-                                  uint32_t inFlags,
-                                  uint32_t inOffset,
-                                  uint32_t inSize)
-{
-    _indexList.push_back(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
-                                           inSize));
-}
-
-void AviFile::WriteIndex()
-{
-    const uint32_t idxTag = MakeFourCc('i', 'd', 'x', '1');
-    _bytesWritten += PutLE32(idxTag);
-
-    // Size is unknown at this point. Update later.
-    _bytesWritten += PutLE32(0);
-    const size_t idxChunkSize = _bytesWritten;
-
-    for (IndexList::iterator iter = _indexList.begin();
-         iter != _indexList.end(); ++iter) {
-        const AVIINDEXENTRY* item = *iter;
-        _bytesWritten += PutLE32(item->ckid);
-        _bytesWritten += PutLE32(item->dwFlags);
-        _bytesWritten += PutLE32(item->dwChunkOffset);
-        _bytesWritten += PutLE32(item->dwChunkLength);
-    }
-    PutLE32LengthFromCurrent(static_cast<long>(idxChunkSize));
-}
-}  // namespace webrtc
diff --git a/webrtc/modules/media_file/source/avi_file.h b/webrtc/modules/media_file/source/avi_file.h
deleted file mode 100644
index 1b5b746..0000000
--- a/webrtc/modules/media_file/source/avi_file.h
+++ /dev/null
@@ -1,277 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-// Class for reading (x)or writing to an AVI file.
-// Note: the class cannot be used for reading and writing at the same time.
-#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
-#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
-
-#include <stdio.h>
-#include <list>
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-struct AVISTREAMHEADER
-{
-    AVISTREAMHEADER();
-    uint32_t fcc;
-    uint32_t cb;
-    uint32_t fccType;
-    uint32_t fccHandler;
-    uint32_t dwFlags;
-    uint16_t wPriority;
-    uint16_t wLanguage;
-    uint32_t dwInitialFrames;
-    uint32_t dwScale;
-    uint32_t dwRate;
-    uint32_t dwStart;
-    uint32_t dwLength;
-    uint32_t dwSuggestedBufferSize;
-    uint32_t dwQuality;
-    uint32_t dwSampleSize;
-    struct
-    {
-        int16_t left;
-        int16_t top;
-        int16_t right;
-        int16_t bottom;
-    } rcFrame;
-};
-
-struct BITMAPINFOHEADER
-{
-    BITMAPINFOHEADER();
-    uint32_t biSize;
-    uint32_t biWidth;
-    uint32_t biHeight;
-    uint16_t biPlanes;
-    uint16_t biBitCount;
-    uint32_t biCompression;
-    uint32_t biSizeImage;
-    uint32_t biXPelsPerMeter;
-    uint32_t biYPelsPerMeter;
-    uint32_t biClrUsed;
-    uint32_t biClrImportant;
-};
-
-struct WAVEFORMATEX
-{
-    WAVEFORMATEX();
-    uint16_t wFormatTag;
-    uint16_t nChannels;
-    uint32_t nSamplesPerSec;
-    uint32_t nAvgBytesPerSec;
-    uint16_t nBlockAlign;
-    uint16_t wBitsPerSample;
-    uint16_t cbSize;
-};
-
-class AviFile
-{
-public:
-    enum AVIStreamType
-    {
-        AVI_AUDIO = 0,
-        AVI_VIDEO = 1
-    };
-
-    // Unsigned, for comparison with must-be-unsigned types.
-    static const unsigned int CODEC_CONFIG_LENGTH = 64;
-    static const unsigned int STREAM_NAME_LENGTH  = 32;
-
-    AviFile();
-    ~AviFile();
-
-    int32_t Open(AVIStreamType streamType, const char* fileName,
-                 bool loop = false);
-
-    int32_t CreateVideoStream(const AVISTREAMHEADER& videoStreamHeader,
-                              const BITMAPINFOHEADER& bitMapInfoHeader,
-                              const uint8_t* codecConfigParams,
-                              int32_t codecConfigParamsLength);
-
-    int32_t CreateAudioStream(const AVISTREAMHEADER& audioStreamHeader,
-                              const WAVEFORMATEX& waveFormatHeader);
-    int32_t Create(const char* fileName);
-
-    int32_t WriteAudio(const uint8_t* data, size_t length);
-    int32_t WriteVideo(const uint8_t* data, size_t length);
-
-    int32_t GetVideoStreamInfo(AVISTREAMHEADER& videoStreamHeader,
-                               BITMAPINFOHEADER& bitmapInfo,
-                               char* codecConfigParameters,
-                               int32_t& configLength);
-
-    int32_t GetDuration(int32_t& durationMs);
-
-    int32_t GetAudioStreamInfo(WAVEFORMATEX& waveHeader);
-
-    int32_t ReadAudio(uint8_t* data, size_t& length);
-    int32_t ReadVideo(uint8_t* data, size_t& length);
-
-    int32_t Close();
-
-    static uint32_t MakeFourCc(uint8_t ch0, uint8_t ch1, uint8_t ch2,
-                               uint8_t ch3);
-
-private:
-    enum AVIFileMode
-    {
-        NotSet,
-        Read,
-        Write
-    };
-
-    struct AVIINDEXENTRY
-    {
-        AVIINDEXENTRY(uint32_t inckid, uint32_t indwFlags,
-                      uint32_t indwChunkOffset,
-                      uint32_t indwChunkLength);
-        uint32_t ckid;
-        uint32_t dwFlags;
-        uint32_t dwChunkOffset;
-        uint32_t dwChunkLength;
-    };
-
-    int32_t PrepareDataChunkHeaders();
-
-    int32_t ReadMoviSubChunk(uint8_t* data, size_t& length, uint32_t tag1,
-                             uint32_t tag2 = 0);
-
-    int32_t WriteRIFF();
-    int32_t WriteHeaders();
-    int32_t WriteAVIMainHeader();
-    int32_t WriteAVIStreamHeaders();
-    int32_t WriteAVIVideoStreamHeaders();
-    int32_t WriteAVIVideoStreamHeaderChunks();
-    int32_t WriteAVIAudioStreamHeaders();
-    int32_t WriteAVIAudioStreamHeaderChunks();
-
-    int32_t WriteMoviStart();
-
-    size_t PutByte(uint8_t byte);
-    size_t PutLE16(uint16_t word);
-    size_t PutLE32(uint32_t word);
-    size_t PutBuffer(const uint8_t* str, size_t size);
-    size_t PutBufferZ(const char* str);
-    long PutLE32LengthFromCurrent(long startPos);
-    void PutLE32AtPos(long pos, uint32_t word);
-
-    size_t GetByte(uint8_t& word);
-    size_t GetLE16(uint16_t& word);
-    size_t GetLE32(uint32_t& word);
-    size_t GetBuffer(uint8_t* str, size_t size);
-
-    void CloseRead();
-    void CloseWrite();
-
-    void ResetMembers();
-    void ResetComplexMembers();
-
-    int32_t ReadRIFF();
-    int32_t ReadHeaders();
-    int32_t ReadAVIMainHeader();
-    int32_t ReadAVIVideoStreamHeader(int32_t endpos);
-    int32_t ReadAVIAudioStreamHeader(int32_t endpos);
-
-    uint32_t StreamAndTwoCharCodeToTag(int32_t streamNum,
-                                       const char* twoCharCode);
-
-    void ClearIndexList();
-    void AddChunkToIndexList(uint32_t inChunkId, uint32_t inFlags,
-                             uint32_t inOffset,  uint32_t inSize);
-
-    void WriteIndex();
-
-private:
-    typedef std::list<AVIINDEXENTRY*> IndexList;
-    struct AVIMAINHEADER
-    {
-        AVIMAINHEADER();
-        uint32_t fcc;
-        uint32_t cb;
-        uint32_t dwMicroSecPerFrame;
-        uint32_t dwMaxBytesPerSec;
-        uint32_t dwPaddingGranularity;
-        uint32_t dwFlags;
-        uint32_t dwTotalFrames;
-        uint32_t dwInitialFrames;
-        uint32_t dwStreams;
-        uint32_t dwSuggestedBufferSize;
-        uint32_t dwWidth;
-        uint32_t dwHeight;
-        uint32_t dwReserved[4];
-    };
-
-    struct AVIStream
-    {
-        AVIStreamType streamType;
-        int           streamNumber;
-    };
-
-    CriticalSectionWrapper* _crit;
-    FILE*            _aviFile;
-    AVIMAINHEADER    _aviHeader;
-    AVISTREAMHEADER  _videoStreamHeader;
-    AVISTREAMHEADER  _audioStreamHeader;
-    BITMAPINFOHEADER _videoFormatHeader;
-    WAVEFORMATEX     _audioFormatHeader;
-
-    int8_t _videoConfigParameters[CODEC_CONFIG_LENGTH];
-    int32_t _videoConfigLength;
-    int8_t _videoStreamName[STREAM_NAME_LENGTH];
-    int8_t _audioConfigParameters[CODEC_CONFIG_LENGTH];
-    int8_t _audioStreamName[STREAM_NAME_LENGTH];
-
-    AVIStream _videoStream;
-    AVIStream _audioStream;
-
-    int32_t _nrStreams;
-    int32_t _aviLength;
-    int32_t _dataLength;
-    size_t        _bytesRead;
-    size_t        _dataStartByte;
-    int32_t _framesRead;
-    int32_t _videoFrames;
-    int32_t _audioFrames;
-
-    bool _reading;
-    AVIStreamType _openedAs;
-    bool _loop;
-    bool _writing;
-
-    size_t _bytesWritten;
-
-    size_t _riffSizeMark;
-    size_t _moviSizeMark;
-    size_t _totNumFramesMark;
-    size_t _videoStreamLengthMark;
-    size_t _audioStreamLengthMark;
-    int32_t _moviListOffset;
-
-    bool _writeAudioStream;
-    bool _writeVideoStream;
-
-    AVIFileMode _aviMode;
-    uint8_t* _videoCodecConfigParams;
-    int32_t _videoCodecConfigParamsLength;
-
-    uint32_t _videoStreamDataChunkPrefix;
-    uint32_t _audioStreamDataChunkPrefix;
-    bool _created;
-
-    IndexList _indexList;
-};
-}  // namespace webrtc
-
-#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
diff --git a/webrtc/modules/media_file/source/media_file_impl.cc b/webrtc/modules/media_file/source/media_file_impl.cc
index 60aeefe..83bb9b5 100644
--- a/webrtc/modules/media_file/source/media_file_impl.cc
+++ b/webrtc/modules/media_file/source/media_file_impl.cc
@@ -102,22 +102,9 @@
     return -1;
 }
 
-int32_t MediaFileImpl::PlayoutAVIVideoData(
-    int8_t* buffer,
-    size_t& dataLengthInBytes)
-{
-    return PlayoutData( buffer, dataLengthInBytes, true);
-}
-
 int32_t MediaFileImpl::PlayoutAudioData(int8_t* buffer,
                                         size_t& dataLengthInBytes)
 {
-    return PlayoutData( buffer, dataLengthInBytes, false);
-}
-
-int32_t MediaFileImpl::PlayoutData(int8_t* buffer, size_t& dataLengthInBytes,
-                                   bool video)
-{
     WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
                "MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %" PRIuS ")",
                  buffer, dataLengthInBytes);
@@ -184,28 +171,12 @@
                     return 0;
                 }
                 break;
-            case kFileFormatAviFile:
+            default:
             {
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-                if(video)
-                {
-                    bytesRead = _ptrFileUtilityObj->ReadAviVideoData(
-                        buffer,
-                        bufferLengthInBytes);
-                }
-                else
-                {
-                    bytesRead = _ptrFileUtilityObj->ReadAviAudioData(
-                        buffer,
-                        bufferLengthInBytes);
-                }
-                break;
-#else
                 WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                             "Invalid file format: %d", kFileFormatAviFile);
+                             "Invalid file format: %d", _fileFormat);
                 assert(false);
                 break;
-#endif
             }
         }
 
@@ -368,36 +339,6 @@
     const uint32_t startPointMs,
     const uint32_t stopPointMs)
 {
-    const bool videoOnly = false;
-    return StartPlayingFile(fileName, notificationTimeMs, loop, videoOnly,
-                            format, codecInst, startPointMs, stopPointMs);
-}
-
-
-int32_t MediaFileImpl::StartPlayingVideoFile(const char* fileName,
-                                             const bool loop,
-                                             bool videoOnly,
-                                             const FileFormats format)
-{
-
-    const uint32_t notificationTimeMs = 0;
-    const uint32_t startPointMs       = 0;
-    const uint32_t stopPointMs        = 0;
-    return StartPlayingFile(fileName, notificationTimeMs, loop, videoOnly,
-                            format, 0, startPointMs, stopPointMs);
-}
-
-int32_t MediaFileImpl::StartPlayingFile(
-    const char* fileName,
-    const uint32_t notificationTimeMs,
-    const bool loop,
-    bool videoOnly,
-    const FileFormats format,
-    const CodecInst* codecInst,
-    const uint32_t startPointMs,
-    const uint32_t stopPointMs)
-{
-
     if(!ValidFileName(fileName))
     {
         return -1;
@@ -432,27 +373,18 @@
         return -1;
     }
 
-    // TODO (hellner): make all formats support reading from stream.
-    bool useStream = (format != kFileFormatAviFile);
-    if( useStream)
+    if(inputStream->OpenFile(fileName, true, loop) != 0)
     {
-        if(inputStream->OpenFile(fileName, true, loop) != 0)
-        {
-            delete inputStream;
-            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                         "Could not open input file %s", fileName);
-            return -1;
-        }
+        delete inputStream;
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Could not open input file %s", fileName);
+        return -1;
     }
 
-    if(StartPlayingStream(*inputStream, fileName, loop, notificationTimeMs,
-                          format, codecInst, startPointMs, stopPointMs,
-                          videoOnly) == -1)
+    if(StartPlayingStream(*inputStream, loop, notificationTimeMs,
+                          format, codecInst, startPointMs, stopPointMs) == -1)
     {
-        if( useStream)
-        {
-            inputStream->CloseFile();
-        }
+        inputStream->CloseFile();
         delete inputStream;
         return -1;
     }
@@ -472,20 +404,18 @@
     const uint32_t startPointMs,
     const uint32_t stopPointMs)
 {
-    return StartPlayingStream(stream, 0, false, notificationTimeMs, format,
+    return StartPlayingStream(stream, false, notificationTimeMs, format,
                               codecInst, startPointMs, stopPointMs);
 }
 
 int32_t MediaFileImpl::StartPlayingStream(
     InStream& stream,
-    const char* filename,
     bool loop,
     const uint32_t notificationTimeMs,
     const FileFormats format,
     const CodecInst*  codecInst,
     const uint32_t startPointMs,
-    const uint32_t stopPointMs,
-    bool videoOnly)
+    const uint32_t stopPointMs)
 {
     if(!ValidFileFormat(format,codecInst))
     {
@@ -593,28 +523,12 @@
             _fileFormat = kFileFormatPreencodedFile;
             break;
         }
-        case kFileFormatAviFile:
+        default:
         {
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-            if(_ptrFileUtilityObj->InitAviReading( filename, videoOnly, loop))
-            {
-                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                             "Not a valid AVI file!");
-                StopPlaying();
-
-                return -1;
-            }
-
-            _ptrFileUtilityObj->codec_info(codec_info_);
-
-            _fileFormat = kFileFormatAviFile;
-            break;
-#else
             WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                         "Invalid file format: %d", kFileFormatAviFile);
+                         "Invalid file format: %d", format);
             assert(false);
             break;
-#endif
         }
     }
     if(_ptrFileUtilityObj->codec_info(codec_info_) == -1)
@@ -687,21 +601,6 @@
     const int8_t*  buffer,
     const size_t bufferLengthInBytes)
 {
-    return IncomingAudioVideoData( buffer, bufferLengthInBytes, false);
-}
-
-int32_t MediaFileImpl::IncomingAVIVideoData(
-    const int8_t*  buffer,
-    const size_t bufferLengthInBytes)
-{
-    return IncomingAudioVideoData( buffer, bufferLengthInBytes, true);
-}
-
-int32_t MediaFileImpl::IncomingAudioVideoData(
-    const int8_t*  buffer,
-    const size_t bufferLengthInBytes,
-    const bool video)
-{
     WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
                  "MediaFile::IncomingData(buffer= 0x%x, bufLen= %" PRIuS,
                  buffer, bufferLengthInBytes);
@@ -772,24 +671,11 @@
                     bytesWritten = _ptrFileUtilityObj->WritePreEncodedData(
                         *_ptrOutStream, buffer, bufferLengthInBytes);
                     break;
-                case kFileFormatAviFile:
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-                    if(video)
-                    {
-                        bytesWritten = _ptrFileUtilityObj->WriteAviVideoData(
-                            buffer, bufferLengthInBytes);
-                    }else
-                    {
-                        bytesWritten = _ptrFileUtilityObj->WriteAviAudioData(
-                            buffer, bufferLengthInBytes);
-                    }
-                    break;
-#else
+                default:
                     WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                                 "Invalid file format: %d", kFileFormatAviFile);
+                                 "Invalid file format: %d", _fileFormat);
                     assert(false);
                     break;
-#endif
             }
         } else {
             // TODO (hellner): quick look at the code makes me think that this
@@ -803,10 +689,7 @@
             }
         }
 
-        if(!video)
-        {
-            _recordDurationMs += samplesWritten / (codec_info_.plfreq / 1000);
-        }
+        _recordDurationMs += samplesWritten / (codec_info_.plfreq / 1000);
 
         // Check if it's time for RecordNotification(..).
         if(_notificationMs)
@@ -850,36 +733,6 @@
     const uint32_t notificationTimeMs,
     const uint32_t maxSizeBytes)
 {
-    VideoCodec dummyCodecInst;
-    return StartRecordingFile(fileName, format, codecInst, dummyCodecInst,
-                              notificationTimeMs, maxSizeBytes);
-}
-
-
-int32_t MediaFileImpl::StartRecordingVideoFile(
-    const char* fileName,
-    const FileFormats format,
-    const CodecInst& codecInst,
-    const VideoCodec& videoCodecInst,
-    bool videoOnly)
-{
-    const uint32_t notificationTimeMs = 0;
-    const uint32_t maxSizeBytes       = 0;
-
-    return StartRecordingFile(fileName, format, codecInst, videoCodecInst,
-                              notificationTimeMs, maxSizeBytes, videoOnly);
-}
-
-int32_t MediaFileImpl::StartRecordingFile(
-    const char* fileName,
-    const FileFormats format,
-    const CodecInst& codecInst,
-    const VideoCodec& videoCodecInst,
-    const uint32_t notificationTimeMs,
-    const uint32_t maxSizeBytes,
-    bool videoOnly)
-{
-
     if(!ValidFileName(fileName))
     {
         return -1;
@@ -897,32 +750,24 @@
         return -1;
     }
 
-    // TODO (hellner): make all formats support writing to stream.
-    const bool useStream = ( format != kFileFormatAviFile);
-    if( useStream)
+    if(outputStream->OpenFile(fileName, false) != 0)
     {
-        if(outputStream->OpenFile(fileName, false) != 0)
-        {
-            delete outputStream;
-            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                         "Could not open output file '%s' for writing!",
-                         fileName);
-            return -1;
-        }
+        delete outputStream;
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Could not open output file '%s' for writing!",
+                     fileName);
+        return -1;
     }
+
     if(maxSizeBytes)
     {
         outputStream->SetMaxFileSize(maxSizeBytes);
     }
 
-    if(StartRecordingStream(*outputStream, fileName, format, codecInst,
-                            videoCodecInst, notificationTimeMs,
-                            videoOnly) == -1)
+    if(StartRecordingAudioStream(*outputStream, format, codecInst,
+                                 notificationTimeMs) == -1)
     {
-        if( useStream)
-        {
-            outputStream->CloseFile();
-        }
+        outputStream->CloseFile();
         delete outputStream;
         return -1;
     }
@@ -940,21 +785,6 @@
     const CodecInst& codecInst,
     const uint32_t notificationTimeMs)
 {
-    VideoCodec dummyCodecInst;
-    return StartRecordingStream(stream, 0, format, codecInst, dummyCodecInst,
-                                notificationTimeMs);
-}
-
-int32_t MediaFileImpl::StartRecordingStream(
-    OutStream& stream,
-    const char* fileName,
-    const FileFormats format,
-    const CodecInst& codecInst,
-    const VideoCodec& videoCodecInst,
-    const uint32_t notificationTimeMs,
-    bool videoOnly)
-{
-
     // Check codec info
     if(!ValidFileFormat(format,&codecInst))
     {
@@ -1055,25 +885,6 @@
             _fileFormat = kFileFormatPreencodedFile;
             break;
         }
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-        case kFileFormatAviFile:
-        {
-            if( (_ptrFileUtilityObj->InitAviWriting(
-                    fileName,
-                    codecInst,
-                    videoCodecInst,videoOnly) == -1) ||
-                    (_ptrFileUtilityObj->codec_info(tmpAudioCodec) != 0))
-            {
-                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                             "Failed to initialize AVI file!");
-                delete _ptrFileUtilityObj;
-                _ptrFileUtilityObj = NULL;
-                return -1;
-            }
-            _fileFormat = kFileFormatAviFile;
-            break;
-        }
-#endif
         default:
         {
             WEBRTC_TRACE(kTraceError, kTraceFile, _id,
@@ -1136,12 +947,6 @@
         {
             _ptrFileUtilityObj->UpdateWavHeader(*_ptrOutStream);
         }
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-        else if( _fileFormat == kFileFormatAviFile)
-        {
-            _ptrFileUtilityObj->CloseAviFile( );
-        }
-#endif
         delete _ptrFileUtilityObj;
         _ptrFileUtilityObj = NULL;
     }
@@ -1268,32 +1073,6 @@
     return 0;
 }
 
-int32_t MediaFileImpl::VideoCodecInst(VideoCodec& codecInst) const
-{
-    CriticalSectionScoped lock(_crit);
-    if(!_playingActive && !_recordingActive)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                     "Neither playout nor recording has been initialized!");
-        return -1;
-    }
-    if( _ptrFileUtilityObj == NULL)
-    {
-        return -1;
-    }
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    VideoCodec videoCodec;
-    if( _ptrFileUtilityObj->VideoCodecInst( videoCodec) != 0)
-    {
-        return -1;
-    }
-    memcpy(&codecInst,&videoCodec,sizeof(VideoCodec));
-    return 0;
-#else
-    return -1;
-#endif
-}
-
 bool MediaFileImpl::ValidFileFormat(const FileFormats format,
                                     const CodecInst*  codecInst)
 {
diff --git a/webrtc/modules/media_file/source/media_file_impl.h b/webrtc/modules/media_file/source/media_file_impl.h
index 388da34..9b14a86 100644
--- a/webrtc/modules/media_file/source/media_file_impl.h
+++ b/webrtc/modules/media_file/source/media_file_impl.h
@@ -32,11 +32,11 @@
     // MediaFile functions
     virtual int32_t PlayoutAudioData(int8_t* audioBuffer,
                                      size_t& dataLengthInBytes) OVERRIDE;
-    virtual int32_t PlayoutAVIVideoData(int8_t* videoBuffer,
-                                        size_t& dataLengthInBytes) OVERRIDE;
+
     virtual int32_t PlayoutStereoData(int8_t* audioBufferLeft,
                                       int8_t* audioBufferRight,
                                       size_t& dataLengthInBytes) OVERRIDE;
+
     virtual int32_t StartPlayingAudioFile(
         const char*  fileName,
         const uint32_t notificationTimeMs = 0,
@@ -45,51 +45,53 @@
         const CodecInst*     codecInst = NULL,
         const uint32_t startPointMs = 0,
         const uint32_t stopPointMs = 0) OVERRIDE;
-    virtual int32_t StartPlayingVideoFile(const char* fileName, const bool loop,
-                                          bool videoOnly,
-                                          const FileFormats format) OVERRIDE;
+
     virtual int32_t StartPlayingAudioStream(InStream& stream,
         const uint32_t notificationTimeMs = 0,
         const FileFormats format = kFileFormatPcm16kHzFile,
         const CodecInst* codecInst = NULL,
         const uint32_t startPointMs = 0,
         const uint32_t stopPointMs = 0) OVERRIDE;
+
     virtual int32_t StopPlaying() OVERRIDE;
+
     virtual bool IsPlaying() OVERRIDE;
+
     virtual int32_t PlayoutPositionMs(uint32_t& positionMs) const OVERRIDE;
+
     virtual int32_t IncomingAudioData(const int8_t* audioBuffer,
                                       const size_t bufferLength) OVERRIDE;
-    virtual int32_t IncomingAVIVideoData(const int8_t* audioBuffer,
-                                         const size_t bufferLength) OVERRIDE;
+
     virtual int32_t StartRecordingAudioFile(
         const char*  fileName,
         const FileFormats    format,
         const CodecInst&     codecInst,
         const uint32_t notificationTimeMs = 0,
         const uint32_t maxSizeBytes = 0) OVERRIDE;
-    virtual int32_t StartRecordingVideoFile(
-        const char* fileName,
-        const FileFormats   format,
-        const CodecInst&    codecInst,
-        const VideoCodec&   videoCodecInst,
-        bool                videoOnly = false) OVERRIDE;
+
     virtual int32_t StartRecordingAudioStream(
         OutStream&           stream,
         const FileFormats    format,
         const CodecInst&     codecInst,
         const uint32_t notificationTimeMs = 0) OVERRIDE;
+
     virtual int32_t StopRecording() OVERRIDE;
+
     virtual bool IsRecording() OVERRIDE;
+
     virtual int32_t RecordDurationMs(uint32_t& durationMs) OVERRIDE;
+
     virtual bool IsStereo() OVERRIDE;
+
     virtual int32_t SetModuleFileCallback(FileCallback* callback) OVERRIDE;
+
     virtual int32_t FileDurationMs(
         const char*  fileName,
         uint32_t&      durationMs,
         const FileFormats    format,
         const uint32_t freqInHz = 16000) OVERRIDE;
+
     virtual int32_t codec_info(CodecInst& codecInst) const OVERRIDE;
-    virtual int32_t VideoCodecInst(VideoCodec& codecInst) const OVERRIDE;
 
 private:
     // Returns true if the combination of format and codecInst is valid.
@@ -100,121 +102,24 @@
     // Returns true if the filename is valid
     static bool ValidFileName(const char* fileName);
 
-  // Returns true if the combination of startPointMs and stopPointMs is valid.
+    // Returns true if the combination of startPointMs and stopPointMs is valid.
     static bool ValidFilePositions(const uint32_t startPointMs,
                                    const uint32_t stopPointMs);
 
-    // Open the file specified by fileName for reading (relative path is
-    // allowed). FileCallback::PlayNotification(..) will be called after
-    // notificationTimeMs of the file has been played if notificationTimeMs is
-    // greater than zero. If loop is true the file will be played until
-    // StopPlaying() is called. When end of file is reached the file is read
-    // from the start. format specifies the type of file fileName refers to.
-    // codecInst specifies the encoding of the audio data. Note that
-    // file formats that contain this information (like WAV files) don't need to
-    // provide a non-NULL codecInst. Only video will be read if videoOnly is
-    // true. startPointMs and stopPointMs, unless zero,
-    // specify what part of the file should be read. From startPointMs ms to
-    // stopPointMs ms.
-    int32_t StartPlayingFile(
-        const char*  fileName,
-        const uint32_t notificationTimeMs = 0,
-        const bool           loop               = false,
-        bool                 videoOnly          = false,
-        const FileFormats    format             = kFileFormatPcm16kHzFile,
-        const CodecInst*     codecInst          = NULL,
-        const uint32_t startPointMs       = 0,
-        const uint32_t stopPointMs        = 0);
-
-    // Opens the file specified by fileName for reading (relative path is
-    // allowed) if format is kFileFormatAviFile otherwise use stream for
-    // reading. FileCallback::PlayNotification(..) will be called after
-    // notificationTimeMs of the file has been played if notificationTimeMs is
-    // greater than zero. If loop is true the file will be played until
-    // StopPlaying() is called. When end of file is reached the file is read
-    // from the start. format specifies the type of file fileName refers to.
-    // codecInst specifies the encoding of the audio data. Note that
-    // file formats that contain this information (like WAV files) don't need to
-    // provide a non-NULL codecInst. Only video will be read if videoOnly is
-    // true. startPointMs and stopPointMs, unless zero,
-    // specify what part of the file should be read. From startPointMs ms to
-    // stopPointMs ms.
-    // TODO (hellner): there is no reason why fileName should be needed here.
-    int32_t StartPlayingStream(
-        InStream&            stream,
-        const char*          fileName,
-        bool                 loop,
-        const uint32_t notificationTimeMs = 0,
-        const FileFormats    format             = kFileFormatPcm16kHzFile,
-        const CodecInst*     codecInst          = NULL,
-        const uint32_t startPointMs       = 0,
-        const uint32_t stopPointMs        = 0,
-        bool                 videoOnly          = true);
-
-    // Writes one frame into dataBuffer. dataLengthInBytes is both an input and
-    // output parameter. As input parameter it indicates the size of
-    // dataBuffer. As output parameter it indicates the number of bytes
-    // written to dataBuffer. If video is true the data written is a video
-    // frame otherwise it is an audio frame.
-    int32_t PlayoutData(int8_t* dataBuffer, size_t& dataLengthInBytes,
-                        bool video);
-
-    // Write one frame, i.e. the bufferLength first bytes of audioBuffer,
-    // to file. The frame is a video frame if video is true, otherwise it is
-    // an audio frame.
-    int32_t IncomingAudioVideoData(const int8_t* buffer,
-                                   const size_t bufferLength,
-                                   const bool video);
-
-    // Open/creates file specified by fileName for writing (relative path is
-    // allowed) if format is kFileFormatAviFile otherwise use stream for
-    // writing. FileCallback::RecordNotification(..) will be called after
-    // notificationTimeMs of audio data has been recorded if
-    // notificationTimeMs is greater than zero.
-    // format specifies the type of file that should be created/opened.
-    // codecInst specifies the encoding of the audio data. videoCodecInst
-    // specifies the encoding of the video data. maxSizeBytes specifies the
-    // number of bytes allowed to be written to file if it is greater than zero.
-    // If format is kFileFormatAviFile and videoOnly is true the AVI file will
-    // only contain video frames.
-    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
-    // mono). Stereo is only supported for WAV files.
-    int32_t StartRecordingFile(
-        const char*  fileName,
-        const FileFormats    format,
-        const CodecInst&     codecInst,
-        const VideoCodec&    videoCodecInst,
-        const uint32_t notificationTimeMs = 0,
-        const uint32_t maxSizeBytes = 0,
-        bool                 videoOnly = false);
-
-    // Open/creates file specified by fileName for writing (relative path is
-    // allowed). FileCallback::RecordNotification(..) will be called after
-    // notificationTimeMs of audio data has been recorded if
-    // notificationTimeMs is greater than zero.
-    // format specifies the type of file that should be created/opened.
-    // codecInst specifies the encoding of the audio data. videoCodecInst
-    // specifies the encoding of the video data. maxSizeBytes specifies the
-    // number of bytes allowed to be written to file if it is greater than zero.
-    // If format is kFileFormatAviFile and videoOnly is true the AVI file will
-    // only contain video frames.
-    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
-    // mono). Stereo is only supported for WAV files.
-    // TODO (hellner): there is no reason why fileName should be needed here.
-    int32_t StartRecordingStream(
-        OutStream&           stream,
-        const char*  fileName,
-        const FileFormats    format,
-        const CodecInst&     codecInst,
-        const VideoCodec&    videoCodecInst,
-        const uint32_t notificationTimeMs = 0,
-        const bool           videoOnly = false);
-
     // Returns true if frequencyInHz is a supported frequency.
     static bool ValidFrequency(const uint32_t frequencyInHz);
 
     void HandlePlayCallbacks(int32_t bytesRead);
 
+    int32_t StartPlayingStream(
+        InStream& stream,
+        bool loop,
+        const uint32_t notificationTimeMs,
+        const FileFormats format,
+        const CodecInst*  codecInst,
+        const uint32_t startPointMs,
+        const uint32_t stopPointMs);
+
     int32_t _id;
     CriticalSectionWrapper* _crit;
     CriticalSectionWrapper* _callbackCrit;
diff --git a/webrtc/modules/media_file/source/media_file_utility.cc b/webrtc/modules/media_file/source/media_file_utility.cc
index 7373100..85f045e 100644
--- a/webrtc/modules/media_file/source/media_file_utility.cc
+++ b/webrtc/modules/media_file/source/media_file_utility.cc
@@ -23,10 +23,6 @@
 #include "webrtc/system_wrappers/interface/file_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    #include "avi_file.h"
-#endif
-
 namespace {
 
 // First 16 bytes the WAVE header. ckID should be "RIFF", wave_ckID should be
@@ -63,369 +59,19 @@
       _readPos(0),
       _reading(false),
       _writing(false),
-      _tempData()
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-      ,
-      _aviAudioInFile(0),
-      _aviVideoInFile(0),
-      _aviOutFile(0)
-#endif
-{
+      _tempData() {
     WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
                  "ModuleFileUtility::ModuleFileUtility()");
     memset(&codec_info_,0,sizeof(CodecInst));
     codec_info_.pltype = -1;
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    memset(&_videoCodec,0,sizeof(_videoCodec));
-#endif
 }
 
 ModuleFileUtility::~ModuleFileUtility()
 {
     WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
                  "ModuleFileUtility::~ModuleFileUtility()");
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    delete _aviAudioInFile;
-    delete _aviVideoInFile;
-#endif
 }
 
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-int32_t ModuleFileUtility::InitAviWriting(
-    const char* filename,
-    const CodecInst& audioCodecInst,
-    const VideoCodec& videoCodecInst,
-    const bool videoOnly /*= false*/)
-{
-    _writing = false;
-
-    delete _aviOutFile;
-    _aviOutFile = new AviFile( );
-
-    AVISTREAMHEADER videoStreamHeader;
-    videoStreamHeader.fccType = AviFile::MakeFourCc('v', 'i', 'd', 's');
-
-#ifdef VIDEOCODEC_I420
-    if (strncmp(videoCodecInst.plName, "I420", 7) == 0)
-    {
-        videoStreamHeader.fccHandler = AviFile::MakeFourCc('I','4','2','0');
-    }
-#endif
-#ifdef VIDEOCODEC_VP8
-    if (strncmp(videoCodecInst.plName, "VP8", 7) == 0)
-    {
-        videoStreamHeader.fccHandler = AviFile::MakeFourCc('V','P','8','0');
-    }
-#endif
-    if (videoStreamHeader.fccHandler == 0)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                     "InitAviWriting() Codec not supported");
-
-        return -1;
-    }
-    videoStreamHeader.dwScale                = 1;
-    videoStreamHeader.dwRate                 = videoCodecInst.maxFramerate;
-    videoStreamHeader.dwSuggestedBufferSize  = videoCodecInst.height *
-        (videoCodecInst.width >> 1) * 3;
-    videoStreamHeader.dwQuality              = (uint32_t)-1;
-    videoStreamHeader.dwSampleSize           = 0;
-    videoStreamHeader.rcFrame.top            = 0;
-    videoStreamHeader.rcFrame.bottom         = videoCodecInst.height;
-    videoStreamHeader.rcFrame.left           = 0;
-    videoStreamHeader.rcFrame.right          = videoCodecInst.width;
-
-    BITMAPINFOHEADER bitMapInfoHeader;
-    bitMapInfoHeader.biSize         = sizeof(BITMAPINFOHEADER);
-    bitMapInfoHeader.biHeight       = videoCodecInst.height;
-    bitMapInfoHeader.biWidth        = videoCodecInst.width;
-    bitMapInfoHeader.biPlanes       = 1;
-    bitMapInfoHeader.biBitCount     = 12;
-    bitMapInfoHeader.biClrImportant = 0;
-    bitMapInfoHeader.biClrUsed      = 0;
-    bitMapInfoHeader.biCompression  = videoStreamHeader.fccHandler;
-    bitMapInfoHeader.biSizeImage    = bitMapInfoHeader.biWidth *
-        bitMapInfoHeader.biHeight * bitMapInfoHeader.biBitCount / 8;
-
-    if (_aviOutFile->CreateVideoStream(
-        videoStreamHeader,
-        bitMapInfoHeader,
-        NULL,
-        0) != 0)
-    {
-        return -1;
-    }
-
-    if(!videoOnly)
-    {
-        AVISTREAMHEADER audioStreamHeader;
-        audioStreamHeader.fccType = AviFile::MakeFourCc('a', 'u', 'd', 's');
-        // fccHandler is the FOURCC of the codec for decoding the stream.
-        // It's an optional parameter that is not used by audio streams.
-        audioStreamHeader.fccHandler   = 0;
-        audioStreamHeader.dwScale      = 1;
-
-        WAVEFORMATEX waveFormatHeader;
-        waveFormatHeader.cbSize          = 0;
-        waveFormatHeader.nChannels       = 1;
-
-        if (strncmp(audioCodecInst.plname, "PCMU", 4) == 0)
-        {
-            audioStreamHeader.dwSampleSize = 1;
-            audioStreamHeader.dwRate       = 8000;
-            audioStreamHeader.dwQuality    = (uint32_t)-1;
-            audioStreamHeader.dwSuggestedBufferSize = 80;
-
-            waveFormatHeader.nAvgBytesPerSec = 8000;
-            waveFormatHeader.nSamplesPerSec  = 8000;
-            waveFormatHeader.wBitsPerSample  = 8;
-            waveFormatHeader.nBlockAlign     = 1;
-            waveFormatHeader.wFormatTag      = kWavFormatMuLaw;
-
-        } else if (strncmp(audioCodecInst.plname, "PCMA", 4) == 0)
-        {
-            audioStreamHeader.dwSampleSize = 1;
-            audioStreamHeader.dwRate       = 8000;
-            audioStreamHeader.dwQuality    = (uint32_t)-1;
-            audioStreamHeader.dwSuggestedBufferSize = 80;
-
-            waveFormatHeader.nAvgBytesPerSec = 8000;
-            waveFormatHeader.nSamplesPerSec  = 8000;
-            waveFormatHeader.wBitsPerSample  = 8;
-            waveFormatHeader.nBlockAlign     = 1;
-            waveFormatHeader.wFormatTag      = kWavFormatALaw;
-
-        } else if (strncmp(audioCodecInst.plname, "L16", 3) == 0)
-        {
-            audioStreamHeader.dwSampleSize = 2;
-            audioStreamHeader.dwRate       = audioCodecInst.plfreq;
-            audioStreamHeader.dwQuality    = (uint32_t)-1;
-            audioStreamHeader.dwSuggestedBufferSize =
-                (audioCodecInst.plfreq/100) * 2;
-
-            waveFormatHeader.nAvgBytesPerSec = audioCodecInst.plfreq * 2;
-            waveFormatHeader.nSamplesPerSec  = audioCodecInst.plfreq;
-            waveFormatHeader.wBitsPerSample  = 16;
-            waveFormatHeader.nBlockAlign     = 2;
-            waveFormatHeader.wFormatTag      = kWavFormatPcm;
-        } else
-        {
-            return -1;
-        }
-
-        if(_aviOutFile->CreateAudioStream(
-            audioStreamHeader,
-            waveFormatHeader) != 0)
-        {
-            return -1;
-        }
-
-
-        if( InitWavCodec(waveFormatHeader.nSamplesPerSec,
-            waveFormatHeader.nChannels,
-            waveFormatHeader.wBitsPerSample,
-            waveFormatHeader.wFormatTag) != 0)
-        {
-            return -1;
-        }
-    }
-    _aviOutFile->Create(filename);
-    _writing = true;
-    return 0;
-}
-
-int32_t ModuleFileUtility::WriteAviAudioData(
-    const int8_t* buffer,
-    size_t bufferLengthInBytes)
-{
-    if( _aviOutFile != 0)
-    {
-        return _aviOutFile->WriteAudio(
-            reinterpret_cast<const uint8_t*>(buffer),
-            bufferLengthInBytes);
-    }
-    else
-    {
-        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
-        return -1;
-    }
-}
-
-int32_t ModuleFileUtility::WriteAviVideoData(
-        const int8_t* buffer,
-        size_t bufferLengthInBytes)
-{
-    if( _aviOutFile != 0)
-    {
-        return _aviOutFile->WriteVideo(
-            reinterpret_cast<const uint8_t*>(buffer),
-            bufferLengthInBytes);
-    }
-    else
-    {
-        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
-        return -1;
-    }
-}
-
-
-int32_t ModuleFileUtility::CloseAviFile( )
-{
-    if( _reading && _aviAudioInFile)
-    {
-        delete _aviAudioInFile;
-        _aviAudioInFile = 0;
-    }
-
-    if( _reading && _aviVideoInFile)
-    {
-        delete _aviVideoInFile;
-        _aviVideoInFile = 0;
-    }
-
-    if( _writing && _aviOutFile)
-    {
-        delete _aviOutFile;
-        _aviOutFile = 0;
-    }
-    return 0;
-}
-
-
-int32_t ModuleFileUtility::InitAviReading(const char* filename, bool videoOnly,
-                                          bool loop)
-{
-    _reading = false;
-    delete _aviVideoInFile;
-    _aviVideoInFile = new AviFile( );
-
-    if ((_aviVideoInFile != 0) && _aviVideoInFile->Open(AviFile::AVI_VIDEO,
-                                                        filename, loop) == -1)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                     "Unable to open AVI file (video)");
-        return -1;
-    }
-
-
-    AVISTREAMHEADER videoInStreamHeader;
-    BITMAPINFOHEADER bitmapInfo;
-    char codecConfigParameters[AviFile::CODEC_CONFIG_LENGTH] = {};
-    int32_t configLength = 0;
-    if( _aviVideoInFile->GetVideoStreamInfo(videoInStreamHeader, bitmapInfo,
-                                            codecConfigParameters,
-                                            configLength) != 0)
-    {
-        return -1;
-    }
-    _videoCodec.width = static_cast<uint16_t>(
-        videoInStreamHeader.rcFrame.right);
-    _videoCodec.height = static_cast<uint16_t>(
-        videoInStreamHeader.rcFrame.bottom);
-    _videoCodec.maxFramerate = static_cast<uint8_t>(
-        videoInStreamHeader.dwRate);
-
-    const size_t plnameLen = sizeof(_videoCodec.plName) / sizeof(char);
-    if (bitmapInfo.biCompression == AviFile::MakeFourCc('I','4','2','0'))
-    {
-        strncpy(_videoCodec.plName, "I420", plnameLen);
-       _videoCodec.codecType = kVideoCodecI420;
-    }
-    else if (bitmapInfo.biCompression ==
-             AviFile::MakeFourCc('V', 'P', '8', '0'))
-    {
-        strncpy(_videoCodec.plName, "VP8", plnameLen);
-        _videoCodec.codecType = kVideoCodecVP8;
-    }
-    else
-    {
-        return -1;
-    }
-
-    if(!videoOnly)
-    {
-        delete _aviAudioInFile;
-        _aviAudioInFile = new AviFile();
-
-        if ( (_aviAudioInFile != 0) &&
-            _aviAudioInFile->Open(AviFile::AVI_AUDIO, filename, loop) == -1)
-        {
-            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
-                         "Unable to open AVI file (audio)");
-            return -1;
-        }
-
-        WAVEFORMATEX waveHeader;
-        if(_aviAudioInFile->GetAudioStreamInfo(waveHeader) != 0)
-        {
-            return -1;
-        }
-        if(InitWavCodec(waveHeader.nSamplesPerSec, waveHeader.nChannels,
-                        waveHeader.wBitsPerSample, waveHeader.wFormatTag) != 0)
-        {
-            return -1;
-        }
-    }
-    _reading = true;
-    return 0;
-}
-
-int32_t ModuleFileUtility::ReadAviAudioData(
-    int8_t*  outBuffer,
-    size_t bufferLengthInBytes)
-{
-    if(_aviAudioInFile == 0)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
-        return -1;
-    }
-
-    if(_aviAudioInFile->ReadAudio(reinterpret_cast<uint8_t*>(outBuffer),
-                                  bufferLengthInBytes) != 0)
-    {
-        return -1;
-    }
-    else
-    {
-        return static_cast<int32_t>(bufferLengthInBytes);
-    }
-}
-
-int32_t ModuleFileUtility::ReadAviVideoData(
-    int8_t* outBuffer,
-    size_t bufferLengthInBytes)
-{
-    if(_aviVideoInFile == 0)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
-        return -1;
-    }
-
-    if(_aviVideoInFile->ReadVideo(reinterpret_cast<uint8_t*>(outBuffer),
-                                  bufferLengthInBytes) != 0)
-    {
-        return -1;
-    } else {
-        return static_cast<int32_t>(bufferLengthInBytes);
-    }
-}
-
-int32_t ModuleFileUtility::VideoCodecInst(VideoCodec& codecInst)
-{
-    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
-               "ModuleFileUtility::CodecInst(codecInst= 0x%x)", &codecInst);
-
-   if(!_reading)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
-                     "CodecInst: not currently reading audio file!");
-        return -1;
-    }
-    memcpy(&codecInst,&_videoCodec,sizeof(VideoCodec));
-    return 0;
-}
-#endif
-
 int32_t ModuleFileUtility::ReadWavHeader(InStream& wav)
 {
     WAVE_RIFF_header RIFFheaderObj;
diff --git a/webrtc/modules/media_file/source/media_file_utility.h b/webrtc/modules/media_file/source/media_file_utility.h
index d8fefcc..2823cec 100644
--- a/webrtc/modules/media_file/source/media_file_utility.h
+++ b/webrtc/modules/media_file/source/media_file_utility.h
@@ -18,7 +18,6 @@
 #include "webrtc/modules/media_file/interface/media_file_defines.h"
 
 namespace webrtc {
-class AviFile;
 class InStream;
 class OutStream;
 
@@ -29,61 +28,6 @@
     ModuleFileUtility(const int32_t id);
     ~ModuleFileUtility();
 
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    // Open the file specified by fileName for reading (relative path is
-    // allowed). If loop is true the file will be played until StopPlaying() is
-    // called. When end of file is reached the file is read from the start.
-    // Only video will be read if videoOnly is true.
-    int32_t InitAviReading(const char* fileName, bool videoOnly, bool loop);
-
-    // Put 10-60ms of audio data from file into the outBuffer depending on
-    // codec frame size. bufferLengthInBytes indicates the size of outBuffer.
-    // The return value is the number of bytes written to outBuffer.
-    // Note: This API only plays mono audio but can be used on files
-    // containing audio with more channels (in which case the audio will be
-    // converted to mono).
-    int32_t ReadAviAudioData(int8_t* outBuffer,
-                             size_t bufferLengthInBytes);
-
-    // Put one video frame into videoBuffer. bufferLengthInBytes indicates the
-    // size of videoBuffer.
-    // The return value is the number of bytes written to videoBuffer.
-    int32_t ReadAviVideoData(int8_t* videoBuffer,
-                             size_t bufferLengthInBytes);
-
-    // Open/create the file specified by fileName for writing audio/video data
-    // (relative path is allowed). codecInst specifies the encoding of the audio
-    // data. videoCodecInst specifies the encoding of the video data. Only video
-    // data will be recorded if videoOnly is true.
-    int32_t InitAviWriting(const char* filename,
-                           const CodecInst& codecInst,
-                           const VideoCodec& videoCodecInst,
-                           const bool videoOnly);
-
-    // Write one audio frame, i.e. the bufferLengthinBytes first bytes of
-    // audioBuffer, to file. The audio frame size is determined by the
-    // codecInst.pacsize parameter of the last sucessfull
-    // InitAviWriting(..) call.
-    // Note: bufferLength must be exactly one frame.
-    int32_t WriteAviAudioData(const int8_t* audioBuffer,
-                              size_t bufferLengthInBytes);
-
-
-    // Write one video frame, i.e. the bufferLengthInBytes first bytes of
-    // videoBuffer, to file.
-    // Note: videoBuffer can contain encoded data. The codec used must be the
-    // same as what was specified by videoCodecInst for the last successful
-    // InitAviWriting(..) call. The videoBuffer must contain exactly
-    // one video frame.
-    int32_t WriteAviVideoData(const int8_t* videoBuffer,
-                              size_t bufferLengthInBytes);
-
-    // Stop recording to file or stream.
-    int32_t CloseAviFile();
-
-    int32_t VideoCodecInst(VideoCodec& codecInst);
-#endif // #ifdef WEBRTC_MODULE_UTILITY_VIDEO
-
     // Prepare for playing audio from stream.
     // startPointMs and stopPointMs, unless zero, specify what part of the file
     // should be read. From startPointMs ms to stopPointMs ms.
@@ -335,13 +279,6 @@
 
     // Scratch buffer used for turning stereo audio to mono.
     uint8_t _tempData[WAV_MAX_BUFFER_SIZE];
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    AviFile* _aviAudioInFile;
-    AviFile* _aviVideoInFile;
-    AviFile* _aviOutFile;
-    VideoCodec _videoCodec;
-#endif
 };
 }  // namespace webrtc
 #endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
diff --git a/webrtc/modules/utility/BUILD.gn b/webrtc/modules/utility/BUILD.gn
index 2cb290f..4503be6 100644
--- a/webrtc/modules/utility/BUILD.gn
+++ b/webrtc/modules/utility/BUILD.gn
@@ -46,13 +46,4 @@
     "../audio_coding",
     "../media_file",
   ]
-  if (rtc_enable_video) {
-    sources += [
-      "source/frame_scaler.cc",
-      "source/video_coder.cc",
-      "source/video_frames_queue.cc",
-    ]
-
-    deps += [ "../video_coding" ]
-  }
 }
diff --git a/webrtc/modules/utility/interface/file_player.h b/webrtc/modules/utility/interface/file_player.h
index 0031f6a..d812deb 100644
--- a/webrtc/modules/utility/interface/file_player.h
+++ b/webrtc/modules/utility/interface/file_player.h
@@ -27,8 +27,7 @@
     enum {MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
     enum {MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
 
-    // Note: will return NULL for video file formats (e.g. AVI) if the flag
-    //       WEBRTC_MODULE_UTILITY_VIDEO is not defined.
+    // Note: will return NULL for unsupported formats.
     static FilePlayer* CreateFilePlayer(const uint32_t instanceID,
                                         const FileFormats fileFormat);
 
diff --git a/webrtc/modules/utility/interface/file_recorder.h b/webrtc/modules/utility/interface/file_recorder.h
index d75dd57..f0ceccb 100644
--- a/webrtc/modules/utility/interface/file_recorder.h
+++ b/webrtc/modules/utility/interface/file_recorder.h
@@ -26,8 +26,7 @@
 {
 public:
 
-    // Note: will return NULL for video file formats (e.g. AVI) if the flag
-    //       WEBRTC_MODULE_UTILITY_VIDEO is not defined.
+    // Note: will return NULL for unsupported formats.
     static FileRecorder* CreateFileRecorder(const uint32_t instanceID,
                                             const FileFormats fileFormat);
 
diff --git a/webrtc/modules/utility/source/file_player_impl.cc b/webrtc/modules/utility/source/file_player_impl.cc
index 5d935fb..1803e8a 100644
--- a/webrtc/modules/utility/source/file_player_impl.cc
+++ b/webrtc/modules/utility/source/file_player_impl.cc
@@ -11,12 +11,6 @@
 #include "webrtc/modules/utility/source/file_player_impl.h"
 #include "webrtc/system_wrappers/interface/logging.h"
 
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    #include "webrtc/modules/utility/source/frame_scaler.h"
-    #include "webrtc/modules/utility/source/video_coder.h"
-    #include "webrtc/system_wrappers/interface/tick_util.h"
-#endif
-
 namespace webrtc {
 FilePlayer* FilePlayer::CreateFilePlayer(uint32_t instanceID,
                                          FileFormats fileFormat)
@@ -31,16 +25,10 @@
     case kFileFormatPcm32kHzFile:
         // audio formats
         return new FilePlayerImpl(instanceID, fileFormat);
-    case kFileFormatAviFile:
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-        return new VideoFilePlayerImpl(instanceID, fileFormat);
-#else
+    default:
         assert(false);
         return NULL;
-#endif
     }
-    assert(false);
-    return NULL;
 }
 
 void FilePlayer::DestroyFilePlayer(FilePlayer* player)
@@ -412,258 +400,4 @@
     _numberOf10MsInDecoder = 0;
     return 0;
 }
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-VideoFilePlayerImpl::VideoFilePlayerImpl(uint32_t instanceID,
-                                         FileFormats fileFormat)
-    : FilePlayerImpl(instanceID, fileFormat),
-      video_decoder_(new VideoCoder()),
-      video_codec_info_(),
-      _decodedVideoFrames(0),
-      _encodedData(*new EncodedVideoData()),
-      _frameScaler(*new FrameScaler()),
-      _critSec(CriticalSectionWrapper::CreateCriticalSection()),
-      _startTime(),
-      _accumulatedRenderTimeMs(0),
-      _frameLengthMS(0),
-      _numberOfFramesRead(0),
-      _videoOnly(false) {
-  memset(&video_codec_info_, 0, sizeof(video_codec_info_));
-}
-
-VideoFilePlayerImpl::~VideoFilePlayerImpl()
-{
-    delete _critSec;
-    delete &_frameScaler;
-    video_decoder_.reset();
-    delete &_encodedData;
-}
-
-int32_t VideoFilePlayerImpl::StartPlayingVideoFile(
-    const char* fileName,
-    bool loop,
-    bool videoOnly)
-{
-    CriticalSectionScoped lock( _critSec);
-
-    if(_fileModule.StartPlayingVideoFile(fileName, loop, videoOnly,
-                                         _fileFormat) != 0)
-    {
-        return -1;
-    }
-
-    _decodedVideoFrames = 0;
-    _accumulatedRenderTimeMs = 0;
-    _frameLengthMS = 0;
-    _numberOfFramesRead = 0;
-    _videoOnly = videoOnly;
-
-    // Set up video_codec_info_ according to file,
-    if(SetUpVideoDecoder() != 0)
-    {
-        StopPlayingFile();
-        return -1;
-    }
-    if(!videoOnly)
-    {
-        // Set up _codec according to file,
-        if(SetUpAudioDecoder() != 0)
-        {
-            StopPlayingFile();
-            return -1;
-        }
-    }
-    return 0;
-}
-
-int32_t VideoFilePlayerImpl::StopPlayingFile()
-{
-    CriticalSectionScoped lock( _critSec);
-
-    _decodedVideoFrames = 0;
-    video_decoder_.reset(new VideoCoder());
-
-    return FilePlayerImpl::StopPlayingFile();
-}
-
-int32_t VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame,
-                                              uint32_t outWidth,
-                                              uint32_t outHeight)
-{
-    CriticalSectionScoped lock( _critSec);
-
-    int32_t retVal = GetVideoFromFile(videoFrame);
-    if(retVal != 0)
-    {
-        return retVal;
-    }
-    if (!videoFrame.IsZeroSize())
-    {
-        retVal = _frameScaler.ResizeFrameIfNeeded(&videoFrame, outWidth,
-                                                  outHeight);
-    }
-    return retVal;
-}
-
-int32_t VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame)
-{
-    CriticalSectionScoped lock( _critSec);
-    // No new video data read from file.
-    if(_encodedData.payloadSize == 0)
-    {
-        videoFrame.ResetSize();
-        return -1;
-    }
-    int32_t retVal = 0;
-    if(strncmp(video_codec_info_.plName, "I420", 5) == 0)
-    {
-      int size_y = video_codec_info_.width * video_codec_info_.height;
-      int half_width = (video_codec_info_.width + 1) / 2;
-      int half_height = (video_codec_info_.height + 1) / 2;
-      int size_uv = half_width * half_height;
-
-      // TODO(mikhal): Do we need to align the stride here?
-      const uint8_t* buffer_y = _encodedData.payloadData;
-      const uint8_t* buffer_u = buffer_y + size_y;
-      const uint8_t* buffer_v = buffer_u + size_uv;
-      videoFrame.CreateFrame(size_y, buffer_y,
-                             size_uv, buffer_u,
-                             size_uv, buffer_v,
-                             video_codec_info_.width, video_codec_info_.height,
-                             video_codec_info_.height, half_width, half_width);
-    }else
-    {
-        // Set the timestamp manually since there is no timestamp in the file.
-        // Update timestamp according to the 90 kHz stream.
-        _encodedData.timeStamp += (90000 / video_codec_info_.maxFramerate);
-        retVal = video_decoder_->Decode(videoFrame, _encodedData);
-    }
-
-    int64_t renderTimeMs = TickTime::MillisecondTimestamp();
-    videoFrame.set_render_time_ms(renderTimeMs);
-
-     // Indicate that the current frame in the encoded buffer is old/has
-     // already been read.
-    _encodedData.payloadSize = 0;
-    if( retVal == 0)
-    {
-        _decodedVideoFrames++;
-    }
-    return retVal;
-}
-
-int32_t VideoFilePlayerImpl::video_codec_info(
-    VideoCodec& videoCodec) const
-{
-    if(video_codec_info_.plName[0] == 0)
-    {
-        return -1;
-    }
-    memcpy(&videoCodec, &video_codec_info_, sizeof(VideoCodec));
-    return 0;
-}
-
-int32_t VideoFilePlayerImpl::TimeUntilNextVideoFrame()
-{
-    if(_fileFormat != kFileFormatAviFile)
-    {
-        return -1;
-    }
-    if(!_fileModule.IsPlaying())
-    {
-        return -1;
-    }
-    if(_encodedData.payloadSize <= 0)
-    {
-        // Read next frame from file.
-        CriticalSectionScoped lock( _critSec);
-
-        if(_fileFormat == kFileFormatAviFile)
-        {
-            // Get next video frame
-            size_t encodedBufferLengthInBytes = _encodedData.bufferSize;
-            if(_fileModule.PlayoutAVIVideoData(
-                   reinterpret_cast< int8_t*>(_encodedData.payloadData),
-                   encodedBufferLengthInBytes) != 0)
-            {
-                LOG(LS_WARNING) << "Error reading video data.";
-                return -1;
-            }
-            _encodedData.payloadSize = encodedBufferLengthInBytes;
-            _encodedData.codec = video_codec_info_.codecType;
-            _numberOfFramesRead++;
-
-            if(_accumulatedRenderTimeMs == 0)
-            {
-                _startTime = TickTime::Now();
-                // This if-statement should only trigger once.
-                _accumulatedRenderTimeMs = 1;
-            } else {
-                // A full second's worth of frames has been read.
-                if(_numberOfFramesRead % video_codec_info_.maxFramerate == 0)
-                {
-                    // Frame rate is in frames per second. Frame length is
-                    // calculated as an integer division, which means it may
-                    // be rounded down. Compensate for this every second.
-                    uint32_t rest = 1000%_frameLengthMS;
-                    _accumulatedRenderTimeMs += rest;
-                }
-                _accumulatedRenderTimeMs += _frameLengthMS;
-            }
-        }
-    }
-
-    int64_t timeToNextFrame;
-    if(_videoOnly)
-    {
-        timeToNextFrame = _accumulatedRenderTimeMs -
-            (TickTime::Now() - _startTime).Milliseconds();
-
-    } else {
-        // Synchronize with the audio stream instead of system clock.
-        timeToNextFrame = _accumulatedRenderTimeMs - _decodedLengthInMS;
-    }
-    if(timeToNextFrame < 0)
-    {
-        return 0;
-
-    } else if(timeToNextFrame > 0x0fffffff)
-    {
-        // Wraparound or audio stream has gone too far ahead of the video stream.
-        return -1;
-    }
-    return static_cast<int32_t>(timeToNextFrame);
-}
-
-int32_t VideoFilePlayerImpl::SetUpVideoDecoder()
-{
-    if (_fileModule.VideoCodecInst(video_codec_info_) != 0)
-    {
-        LOG(LS_WARNING) << "SetUpVideoDecoder() failed to retrieve codec info "
-                        << "of file data.";
-        return -1;
-    }
-
-    int32_t useNumberOfCores = 1;
-    if (video_decoder_->SetDecodeCodec(video_codec_info_, useNumberOfCores) !=
-        0) {
-        LOG(LS_WARNING) << "SetUpVideoDecoder() codec "
-                        << video_codec_info_.plName << " not supported.";
-        return -1;
-    }
-
-    _frameLengthMS = 1000/video_codec_info_.maxFramerate;
-
-    // Size of unencoded data (I420) should be the largest possible frame size
-    // in a file.
-    const size_t KReadBufferSize = 3 * video_codec_info_.width *
-        video_codec_info_.height / 2;
-    _encodedData.VerifyAndAllocate(KReadBufferSize);
-    _encodedData.encodedHeight = video_codec_info_.height;
-    _encodedData.encodedWidth = video_codec_info_.width;
-    _encodedData.payloadType = video_codec_info_.plType;
-    _encodedData.timeStamp = 0;
-    return 0;
-}
-#endif // WEBRTC_MODULE_UTILITY_VIDEO
 }  // namespace webrtc
diff --git a/webrtc/modules/utility/source/file_player_impl.h b/webrtc/modules/utility/source/file_player_impl.h
index 3093ce2..f81e710 100644
--- a/webrtc/modules/utility/source/file_player_impl.h
+++ b/webrtc/modules/utility/source/file_player_impl.h
@@ -23,9 +23,6 @@
 #include "webrtc/typedefs.h"
 
 namespace webrtc {
-class VideoCoder;
-class FrameScaler;
-
 class FilePlayerImpl : public FilePlayer
 {
 public:
@@ -78,45 +75,5 @@
     Resampler _resampler;
     float _scaling;
 };
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-class VideoFilePlayerImpl: public FilePlayerImpl
-{
-public:
-    VideoFilePlayerImpl(uint32_t instanceID, FileFormats fileFormat);
-    ~VideoFilePlayerImpl();
-
-    // FilePlayer functions.
-    virtual int32_t TimeUntilNextVideoFrame();
-    virtual int32_t StartPlayingVideoFile(const char* fileName,
-                                          bool loop,
-                                          bool videoOnly);
-    virtual int32_t StopPlayingFile();
-    virtual int32_t video_codec_info(VideoCodec& videoCodec) const;
-    virtual int32_t GetVideoFromFile(I420VideoFrame& videoFrame);
-    virtual int32_t GetVideoFromFile(I420VideoFrame& videoFrame,
-                                     const uint32_t outWidth,
-                                     const uint32_t outHeight);
-
-private:
-    int32_t SetUpVideoDecoder();
-
-    rtc::scoped_ptr<VideoCoder> video_decoder_;
-    VideoCodec video_codec_info_;
-    int32_t _decodedVideoFrames;
-
-    EncodedVideoData& _encodedData;
-
-    FrameScaler& _frameScaler;
-    CriticalSectionWrapper* _critSec;
-    TickTime _startTime;
-    int64_t _accumulatedRenderTimeMs;
-    uint32_t _frameLengthMS;
-
-    int32_t _numberOfFramesRead;
-    bool _videoOnly;
-};
-#endif //WEBRTC_MODULE_UTILITY_VIDEO
-
 }  // namespace webrtc
 #endif // WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
diff --git a/webrtc/modules/utility/source/file_recorder_impl.cc b/webrtc/modules/utility/source/file_recorder_impl.cc
index 11f70f6..29eede8 100644
--- a/webrtc/modules/utility/source/file_recorder_impl.cc
+++ b/webrtc/modules/utility/source/file_recorder_impl.cc
@@ -14,36 +14,11 @@
 #include "webrtc/modules/utility/source/file_recorder_impl.h"
 #include "webrtc/system_wrappers/interface/logging.h"
 
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    #include "webrtc/modules/utility/source/frame_scaler.h"
-    #include "webrtc/modules/utility/source/video_coder.h"
-    #include "webrtc/modules/utility/source/video_frames_queue.h"
-    #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#endif
-
 namespace webrtc {
 FileRecorder* FileRecorder::CreateFileRecorder(uint32_t instanceID,
                                                FileFormats fileFormat)
 {
-    switch(fileFormat)
-    {
-    case kFileFormatWavFile:
-    case kFileFormatCompressedFile:
-    case kFileFormatPreencodedFile:
-    case kFileFormatPcm16kHzFile:
-    case kFileFormatPcm8kHzFile:
-    case kFileFormatPcm32kHzFile:
-        return new FileRecorderImpl(instanceID, fileFormat);
-    case kFileFormatAviFile:
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-        return new AviRecorder(instanceID, fileFormat);
-#else
-        assert(false);
-        return NULL;
-#endif
-    }
-    assert(false);
-    return NULL;
+    return new FileRecorderImpl(instanceID, fileFormat);
 }
 
 void FileRecorder::DestroyFileRecorder(FileRecorder* recorder)
@@ -98,14 +73,9 @@
     _amrFormat = amrFormat;
 
     int32_t retVal = 0;
-    if(_fileFormat != kFileFormatAviFile)
-    {
-        // AVI files should be started using StartRecordingVideoFile(..) all
-        // other formats should use this API.
-        retVal =_moduleFile->StartRecordingAudioFile(fileName, _fileFormat,
-                                                     codecInst,
-                                                     notificationTimeMs);
-    }
+    retVal =_moduleFile->StartRecordingAudioFile(fileName, _fileFormat,
+                                                 codecInst,
+                                                 notificationTimeMs);
 
     if( retVal == 0)
     {
@@ -314,410 +284,4 @@
 {
     return _moduleFile->IncomingAudioData(audioBuffer, bufferLength);
 }
-
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-AviRecorder::AviRecorder(uint32_t instanceID, FileFormats fileFormat)
-    : FileRecorderImpl(instanceID, fileFormat),
-      _videoOnly(false),
-      _thread( 0),
-      _timeEvent(*EventWrapper::Create()),
-      _critSec(CriticalSectionWrapper::CreateCriticalSection()),
-      _writtenVideoFramesCounter(0),
-      _writtenAudioMS(0),
-      _writtenVideoMS(0)
-{
-    _videoEncoder = new VideoCoder();
-    _frameScaler = new FrameScaler();
-    _videoFramesQueue = new VideoFramesQueue();
-    _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
-                                          "AviRecorder()");
-}
-
-AviRecorder::~AviRecorder( )
-{
-    StopRecording( );
-
-    delete _videoEncoder;
-    delete _frameScaler;
-    delete _videoFramesQueue;
-    delete _thread;
-    delete &_timeEvent;
-    delete _critSec;
-}
-
-int32_t AviRecorder::StartRecordingVideoFile(
-    const char* fileName,
-    const CodecInst& audioCodecInst,
-    const VideoCodec& videoCodecInst,
-    ACMAMRPackingFormat amrFormat,
-    bool videoOnly)
-{
-    _firstAudioFrameReceived = false;
-    _videoCodecInst = videoCodecInst;
-    _videoOnly = videoOnly;
-
-    if(_moduleFile->StartRecordingVideoFile(fileName, _fileFormat,
-                                            audioCodecInst, videoCodecInst,
-                                            videoOnly) != 0)
-    {
-        return -1;
-    }
-
-    if(!videoOnly)
-    {
-        if(FileRecorderImpl::StartRecordingAudioFile(fileName,audioCodecInst, 0,
-                                                     amrFormat) !=0)
-        {
-            StopRecording();
-            return -1;
-        }
-    }
-    if( SetUpVideoEncoder() != 0)
-    {
-        StopRecording();
-        return -1;
-    }
-    if(_videoOnly)
-    {
-        // Writing to the AVI file is non-blocking.
-        // Start a non-blocking timer if recording video only. If recording
-        // both video and audio, let the pushing of audio frames be the timer.
-        _timeEvent.StartTimer(true, 1000 / _videoCodecInst.maxFramerate);
-    }
-    StartThread();
-    return 0;
-}
-
-int32_t AviRecorder::StopRecording()
-{
-    _timeEvent.StopTimer();
-
-    StopThread();
-    return FileRecorderImpl::StopRecording();
-}
-
-size_t AviRecorder::CalcI420FrameSize( ) const
-{
-    return 3 * _videoCodecInst.width * _videoCodecInst.height / 2;
-}
-
-int32_t AviRecorder::SetUpVideoEncoder()
-{
-    // Size of unencoded data (I420) should be the largest possible frame size
-    // in a file.
-    _videoMaxPayloadSize = CalcI420FrameSize();
-    _videoEncodedData.VerifyAndAllocate(_videoMaxPayloadSize);
-
-    _videoCodecInst.plType = _videoEncoder->DefaultPayloadType(
-        _videoCodecInst.plName);
-
-    int32_t useNumberOfCores = 1;
-    // Set the max payload size to 16000. This means that the codec will try to
-    // create slices that will fit in 16000-byte packets. However, the
-    // Encode() call will still generate one full frame.
-    if(_videoEncoder->SetEncodeCodec(_videoCodecInst, useNumberOfCores,
-                                     16000))
-    {
-        return -1;
-    }
-    return 0;
-}
-
-int32_t AviRecorder::RecordVideoToFile(const I420VideoFrame& videoFrame)
-{
-    CriticalSectionScoped lock(_critSec);
-    if(!IsRecording() || videoFrame.IsZeroSize())
-    {
-        return -1;
-    }
-    // The frame is written to file in AviRecorder::Process().
-    int32_t retVal = _videoFramesQueue->AddFrame(videoFrame);
-    if(retVal != 0)
-    {
-        StopRecording();
-    }
-    return retVal;
-}
-
-bool AviRecorder::StartThread()
-{
-    unsigned int id;
-    if( _thread == 0)
-    {
-        return false;
-    }
-
-    return _thread->Start(id);
-}
-
-bool AviRecorder::StopThread()
-{
-    _critSec->Enter();
-
-    if(_thread)
-    {
-        ThreadWrapper* thread = _thread;
-        _thread = NULL;
-
-        _timeEvent.Set();
-
-        _critSec->Leave();
-
-        if(thread->Stop())
-        {
-            delete thread;
-        } else {
-            return false;
-        }
-    } else {
-        _critSec->Leave();
-    }
-    return true;
-}
-
-bool AviRecorder::Run( ThreadObj threadObj)
-{
-    return static_cast<AviRecorder*>( threadObj)->Process();
-}
-
-int32_t AviRecorder::ProcessAudio()
-{
-    if (_writtenVideoFramesCounter == 0)
-    {
-        // Get the most recent frame that is due for writing to file. Since
-        // frames are unencoded it's safe to throw away frames if necessary
-        // for synchronizing audio and video.
-        I420VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
-        if(frameToProcess)
-        {
-            // Synchronize audio to the current frame to process by throwing
-            // away audio samples with an older timestamp than the video frame.
-            size_t numberOfAudioElements =
-                _audioFramesToWrite.size();
-            for (size_t i = 0; i < numberOfAudioElements; ++i)
-            {
-                AudioFrameFileInfo* frameInfo = _audioFramesToWrite.front();
-                if(TickTime::TicksToMilliseconds(
-                       frameInfo->_playoutTS.Ticks()) <
-                   frameToProcess->render_time_ms())
-                {
-                    delete frameInfo;
-                    _audioFramesToWrite.pop_front();
-                } else
-                {
-                    break;
-                }
-            }
-        }
-    }
-    // Write all audio up to current timestamp.
-    int32_t error = 0;
-    size_t numberOfAudioElements = _audioFramesToWrite.size();
-    for (size_t i = 0; i < numberOfAudioElements; ++i)
-    {
-        AudioFrameFileInfo* frameInfo = _audioFramesToWrite.front();
-        if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0)
-        {
-            _moduleFile->IncomingAudioData(frameInfo->_audioData,
-                                           frameInfo->_audioSize);
-            _writtenAudioMS += frameInfo->_audioMS;
-            delete frameInfo;
-            _audioFramesToWrite.pop_front();
-        } else {
-            break;
-        }
-    }
-    return error;
-}
-
-bool AviRecorder::Process()
-{
-    switch(_timeEvent.Wait(500))
-    {
-    case kEventSignaled:
-        if(_thread == NULL)
-        {
-            return false;
-        }
-        break;
-    case kEventError:
-        return false;
-    case kEventTimeout:
-        // No events triggered. No work to do.
-        return true;
-    }
-    CriticalSectionScoped lock( _critSec);
-
-    // Get the most recent frame to write to file (if any). Synchronize it with
-    // the audio stream (if any). Synchronization is based on the video frame's
-    // render timestamp (i.e. VideoFrame::RenderTimeMS()).
-    I420VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
-    if( frameToProcess == NULL)
-    {
-        return true;
-    }
-    int32_t error = 0;
-    if(!_videoOnly)
-    {
-        if(!_firstAudioFrameReceived)
-        {
-            // Video and audio can only be synchronized if both have been
-            // received.
-            return true;
-        }
-        error = ProcessAudio();
-
-        while (_writtenAudioMS > _writtenVideoMS)
-        {
-            error = EncodeAndWriteVideoToFile( *frameToProcess);
-            if( error != 0)
-            {
-                LOG(LS_ERROR) << "AviRecorder::Process() error writing to "
-                              << "file.";
-                break;
-            } else {
-                uint32_t frameLengthMS = 1000 /
-                    _videoCodecInst.maxFramerate;
-                _writtenVideoFramesCounter++;
-                _writtenVideoMS += frameLengthMS;
-                // A full second's worth of frames has been written.
-                if(_writtenVideoFramesCounter%_videoCodecInst.maxFramerate == 0)
-                {
-                    // Frame rate is in frames per second. Frame length is
-                    // calculated as an integer division, which means it may
-                    // be rounded down. Compensate for this every second.
-                    uint32_t rest = 1000 % frameLengthMS;
-                    _writtenVideoMS += rest;
-                }
-            }
-        }
-    } else {
-        // Frame rate is in frames per second. Frame length is calculated as an
-        // integer division, which means it may be rounded down. This introduces
-        // drift. Once a full frame's worth of drift has accumulated, skip
-        // writing one frame. Since frame rate is in frames per second, the
-        // drift is completely compensated for.
-        uint32_t frameLengthMS = 1000/_videoCodecInst.maxFramerate;
-        uint32_t restMS = 1000 % frameLengthMS;
-        uint32_t frameSkip = (_videoCodecInst.maxFramerate *
-                              frameLengthMS) / restMS;
-
-        _writtenVideoFramesCounter++;
-        if(_writtenVideoFramesCounter % frameSkip == 0)
-        {
-            _writtenVideoMS += frameLengthMS;
-            return true;
-        }
-
-        error = EncodeAndWriteVideoToFile( *frameToProcess);
-        if(error != 0)
-        {
-            LOG(LS_ERROR) << "AviRecorder::Process() error writing to file.";
-        } else {
-            _writtenVideoMS += frameLengthMS;
-        }
-    }
-    return error == 0;
-}
-
-int32_t AviRecorder::EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame)
-{
-    if (!IsRecording() || videoFrame.IsZeroSize())
-    {
-        return -1;
-    }
-
-    if(_frameScaler->ResizeFrameIfNeeded(&videoFrame, _videoCodecInst.width,
-                                         _videoCodecInst.height) != 0)
-    {
-        return -1;
-    }
-
-    _videoEncodedData.payloadSize = 0;
-
-    if( STR_CASE_CMP(_videoCodecInst.plName, "I420") == 0)
-    {
-       size_t length =
-           CalcBufferSize(kI420, videoFrame.width(), videoFrame.height());
-        _videoEncodedData.VerifyAndAllocate(length);
-
-        // I420 is raw data. No encoding needed (each sample is represented by
-        // 1 byte so there is no difference depending on endianness).
-        int ret_length = ExtractBuffer(videoFrame, length,
-                                       _videoEncodedData.payloadData);
-        if (ret_length < 0)
-          return -1;
-
-        _videoEncodedData.payloadSize = ret_length;
-        _videoEncodedData.frameType = kVideoFrameKey;
-    }else {
-        if( _videoEncoder->Encode(videoFrame, _videoEncodedData) != 0)
-        {
-            return -1;
-        }
-    }
-
-    if(_videoEncodedData.payloadSize > 0)
-    {
-        if(_moduleFile->IncomingAVIVideoData(
-               (int8_t*)(_videoEncodedData.payloadData),
-               _videoEncodedData.payloadSize))
-        {
-            LOG(LS_ERROR) << "Error writing AVI file.";
-            return -1;
-        }
-    } else {
-        LOG(LS_ERROR) << "FileRecorder::RecordVideoToFile() frame dropped by "
-                      << "encoder, bitrate likely too low.";
-    }
-    return 0;
-}
-
-// Store audio frame in the _audioFramesToWrite buffer. The writing to file
-// happens in AviRecorder::Process().
-int32_t AviRecorder::WriteEncodedAudioData(
-    const int8_t* audioBuffer,
-    size_t bufferLength,
-    uint16_t millisecondsOfData,
-    const TickTime* playoutTS)
-{
-    CriticalSectionScoped lock(_critSec);
-
-    if (!IsRecording())
-    {
-        return -1;
-    }
-    if (bufferLength > MAX_AUDIO_BUFFER_IN_BYTES)
-    {
-        return -1;
-    }
-    if (_videoOnly)
-    {
-        return -1;
-    }
-    if (_audioFramesToWrite.size() > kMaxAudioBufferQueueLength)
-    {
-        StopRecording();
-        return -1;
-    }
-    _firstAudioFrameReceived = true;
-
-    if(playoutTS)
-    {
-        _audioFramesToWrite.push_back(new AudioFrameFileInfo(audioBuffer,
-                                                             bufferLength,
-                                                             millisecondsOfData,
-                                                             *playoutTS));
-    } else {
-        _audioFramesToWrite.push_back(new AudioFrameFileInfo(audioBuffer,
-                                                             bufferLength,
-                                                             millisecondsOfData,
-                                                             TickTime::Now()));
-    }
-    _timeEvent.Set();
-    return 0;
-}
-
-#endif // WEBRTC_MODULE_UTILITY_VIDEO
 }  // namespace webrtc
diff --git a/webrtc/modules/utility/source/file_recorder_impl.h b/webrtc/modules/utility/source/file_recorder_impl.h
index 5593827..776654b 100644
--- a/webrtc/modules/utility/source/file_recorder_impl.h
+++ b/webrtc/modules/utility/source/file_recorder_impl.h
@@ -30,12 +30,6 @@
 #include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/typedefs.h"
 
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-    #include "webrtc/modules/utility/source/frame_scaler.h"
-    #include "webrtc/modules/utility/source/video_coder.h"
-    #include "webrtc/modules/utility/source/video_frames_queue.h"
-#endif
-
 namespace webrtc {
 // The largest decoded frame size in samples (60ms with 32kHz sample rate).
 enum { MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
@@ -104,90 +98,5 @@
     AudioCoder _audioEncoder;
     Resampler _audioResampler;
 };
-
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-class AudioFrameFileInfo
-{
-    public:
-       AudioFrameFileInfo(const int8_t* audioData,
-                     const size_t audioSize,
-                     const uint16_t audioMS,
-                     const TickTime& playoutTS)
-           : _audioData(), _audioSize(audioSize), _audioMS(audioMS),
-             _playoutTS(playoutTS)
-       {
-           if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES)
-           {
-               assert(false);
-               _audioSize = 0;
-               return;
-           }
-           memcpy(_audioData, audioData, audioSize);
-       };
-    // TODO (hellner): either turn into a struct or provide get/set functions.
-    int8_t   _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
-    size_t   _audioSize;
-    uint16_t _audioMS;
-    TickTime _playoutTS;
-};
-
-class AviRecorder : public FileRecorderImpl
-{
-public:
-    AviRecorder(uint32_t instanceID, FileFormats fileFormat);
-    virtual ~AviRecorder();
-
-    // FileRecorder functions.
-    virtual int32_t StartRecordingVideoFile(
-        const char* fileName,
-        const CodecInst& audioCodecInst,
-        const VideoCodec& videoCodecInst,
-        ACMAMRPackingFormat amrFormat = AMRFileStorage,
-        bool videoOnly = false);
-    virtual int32_t StopRecording();
-    virtual int32_t RecordVideoToFile(const I420VideoFrame& videoFrame);
-
-protected:
-    virtual int32_t WriteEncodedAudioData(
-        const int8_t*  audioBuffer,
-        size_t bufferLength,
-        uint16_t millisecondsOfData,
-        const TickTime* playoutTS);
-private:
-    typedef std::list<AudioFrameFileInfo*> AudioInfoList;
-    static bool Run(ThreadObj threadObj);
-    bool Process();
-
-    bool StartThread();
-    bool StopThread();
-
-    int32_t EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame);
-    int32_t ProcessAudio();
-
-    size_t CalcI420FrameSize() const;
-    int32_t SetUpVideoEncoder();
-
-    VideoCodec _videoCodecInst;
-    bool _videoOnly;
-
-    AudioInfoList _audioFramesToWrite;
-    bool _firstAudioFrameReceived;
-
-    VideoFramesQueue* _videoFramesQueue;
-
-    FrameScaler* _frameScaler;
-    VideoCoder* _videoEncoder;
-    size_t _videoMaxPayloadSize;
-    EncodedVideoData _videoEncodedData;
-
-    ThreadWrapper* _thread;
-    EventWrapper& _timeEvent;
-    CriticalSectionWrapper* _critSec;
-    int64_t _writtenVideoFramesCounter;
-    int64_t _writtenAudioMS;
-    int64_t _writtenVideoMS;
-};
-#endif // WEBRTC_MODULE_UTILITY_VIDEO
 }  // namespace webrtc
 #endif // WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
diff --git a/webrtc/modules/utility/source/frame_scaler.cc b/webrtc/modules/utility/source/frame_scaler.cc
deleted file mode 100644
index 50ccf8a..0000000
--- a/webrtc/modules/utility/source/frame_scaler.cc
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/utility/source/frame_scaler.h"
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-
-#include "webrtc/common_video/libyuv/include/scaler.h"
-
-namespace webrtc {
-
-FrameScaler::FrameScaler()
-    : scaler_(new Scaler()),
-      scaled_frame_() {}
-
-FrameScaler::~FrameScaler() {}
-
-int FrameScaler::ResizeFrameIfNeeded(I420VideoFrame* video_frame,
-                                     int out_width,
-                                     int out_height) {
-  if (video_frame->IsZeroSize()) {
-    return -1;
-  }
-
-  if ((video_frame->width() != out_width) ||
-      (video_frame->height() != out_height)) {
-    // Set correct scale settings and scale |video_frame| into |scaled_frame_|.
-    scaler_->Set(video_frame->width(), video_frame->height(), out_width,
-                 out_height, kI420, kI420, kScaleBox);
-    int ret = scaler_->Scale(*video_frame, &scaled_frame_);
-    if (ret < 0) {
-      return ret;
-    }
-
-    scaled_frame_.set_render_time_ms(video_frame->render_time_ms());
-    scaled_frame_.set_timestamp(video_frame->timestamp());
-    video_frame->SwapFrame(&scaled_frame_);
-  }
-  return 0;
-}
-
-}  // namespace webrtc
-
-#endif  // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/webrtc/modules/utility/source/frame_scaler.h b/webrtc/modules/utility/source/frame_scaler.h
deleted file mode 100644
index 0aaafa4..0000000
--- a/webrtc/modules/utility/source/frame_scaler.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file implements a class that can be used for scaling frames.
-
-#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
-#define WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/common_video/interface/i420_video_frame.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/interface/module_common_types.h"
-
-namespace webrtc {
-
-class Scaler;
-class VideoFrame;
-
-class FrameScaler {
- public:
-    FrameScaler();
-    ~FrameScaler();
-
-    // Re-sizes |video_frame| so that it has the width |out_width| and height
-    // |out_height|.
-    int ResizeFrameIfNeeded(I420VideoFrame* video_frame,
-                            int out_width,
-                            int out_height);
-
- private:
-  rtc::scoped_ptr<Scaler> scaler_;
-    I420VideoFrame scaled_frame_;
-};
-
-}  // namespace webrtc
-
-#endif  // WEBRTC_MODULE_UTILITY_VIDEO
-
-#endif  // WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
diff --git a/webrtc/modules/utility/source/video_coder.cc b/webrtc/modules/utility/source/video_coder.cc
deleted file mode 100644
index 957826c..0000000
--- a/webrtc/modules/utility/source/video_coder.cc
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-
-#include "webrtc/modules/utility/source/video_coder.h"
-#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
-
-namespace webrtc {
-VideoCoder::VideoCoder()
-    : _vcm(VideoCodingModule::Create(nullptr)), _decodedVideo(0) {
-    _vcm->InitializeSender();
-    _vcm->InitializeReceiver();
-
-    _vcm->RegisterTransportCallback(this);
-    _vcm->RegisterReceiveCallback(this);
-}
-
-VideoCoder::~VideoCoder()
-{
-    VideoCodingModule::Destroy(_vcm);
-}
-
-int32_t VideoCoder::SetEncodeCodec(VideoCodec& videoCodecInst,
-                                   uint32_t numberOfCores,
-                                   uint32_t maxPayloadSize)
-{
-    if(_vcm->RegisterSendCodec(&videoCodecInst, numberOfCores,
-                               maxPayloadSize) != VCM_OK)
-    {
-        return -1;
-    }
-    return 0;
-}
-
-
-int32_t VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst,
-                                   int32_t numberOfCores)
-{
-    if (videoCodecInst.plType == 0)
-    {
-        int8_t plType = DefaultPayloadType(videoCodecInst.plName);
-        if (plType == -1)
-        {
-            return -1;
-        }
-        videoCodecInst.plType = plType;
-    }
-
-    if(_vcm->RegisterReceiveCodec(&videoCodecInst, numberOfCores) != VCM_OK)
-    {
-        return -1;
-    }
-    return 0;
-}
-
-int32_t VideoCoder::Decode(I420VideoFrame& decodedVideo,
-                           const EncodedVideoData& encodedData)
-{
-    decodedVideo.ResetSize();
-    if(encodedData.payloadSize <= 0)
-    {
-        return -1;
-    }
-
-    _decodedVideo = &decodedVideo;
-    return 0;
-}
-
-
-int32_t VideoCoder::Encode(const I420VideoFrame& videoFrame,
-                           EncodedVideoData& videoEncodedData)
-{
-    // The AddVideoFrame(..) call will (indirectly) call SendData(). Store a
-    // pointer to videoFrame so that it can be updated.
-    _videoEncodedData = &videoEncodedData;
-    videoEncodedData.payloadSize = 0;
-    if(_vcm->AddVideoFrame(videoFrame) != VCM_OK)
-    {
-        return -1;
-    }
-    return 0;
-}
-
-int8_t VideoCoder::DefaultPayloadType(const char* plName)
-{
-    VideoCodec tmpCodec;
-    int32_t numberOfCodecs = _vcm->NumberOfCodecs();
-    for (uint8_t i = 0; i < numberOfCodecs; i++)
-    {
-        _vcm->Codec(i, &tmpCodec);
-        if(strncmp(tmpCodec.plName, plName, kPayloadNameSize) == 0)
-        {
-            return tmpCodec.plType;
-        }
-    }
-    return -1;
-}
-
-int32_t VideoCoder::FrameToRender(I420VideoFrame& videoFrame)
-{
-    return _decodedVideo->CopyFrame(videoFrame);
-}
-
-int32_t VideoCoder::SendData(
-    const uint8_t payloadType,
-    const EncodedImage& encoded_image,
-    const RTPFragmentationHeader& fragmentationHeader,
-    const RTPVideoHeader* /*rtpVideoHdr*/)
-{
-    // Store the data in _videoEncodedData which is a pointer to videoFrame in
-    // Encode(..)
-    _videoEncodedData->VerifyAndAllocate(encoded_image._length);
-    _videoEncodedData->frameType =
-        VCMEncodedFrame::ConvertFrameType(encoded_image._frameType);
-    _videoEncodedData->payloadType = payloadType;
-    _videoEncodedData->timeStamp = encoded_image._timeStamp;
-    _videoEncodedData->fragmentationHeader.CopyFrom(fragmentationHeader);
-    memcpy(_videoEncodedData->payloadData, encoded_image._buffer,
-           sizeof(uint8_t) * encoded_image._length);
-    _videoEncodedData->payloadSize = encoded_image._length;
-    return 0;
-}
-}  // namespace webrtc
-#endif // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/webrtc/modules/utility/source/video_coder.h b/webrtc/modules/utility/source/video_coder.h
deleted file mode 100644
index 5695f5e..0000000
--- a/webrtc/modules/utility/source/video_coder.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
-#define WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_coding/main/interface/video_coding.h"
-
-namespace webrtc {
-class VideoCoder : public VCMPacketizationCallback, public VCMReceiveCallback
-{
-public:
-    VideoCoder();
-    ~VideoCoder();
-
-    int32_t SetEncodeCodec(VideoCodec& videoCodecInst,
-                           uint32_t numberOfCores,
-                           uint32_t maxPayloadSize);
-
-
-    // Select the codec that should be used for decoding. videoCodecInst.plType
-    // will be set to the codec's default payload type.
-    int32_t SetDecodeCodec(VideoCodec& videoCodecInst, int32_t numberOfCores);
-
-    int32_t Decode(I420VideoFrame& decodedVideo,
-                   const EncodedVideoData& encodedData);
-
-    int32_t Encode(const I420VideoFrame& videoFrame,
-                   EncodedVideoData& videoEncodedData);
-
-    int8_t DefaultPayloadType(const char* plName);
-
-private:
-    // VCMReceiveCallback function.
-    // Note: called by VideoCodingModule when decoding finished.
-    virtual int32_t FrameToRender(I420VideoFrame& videoFrame) OVERRIDE;
-
-    // VCMPacketizationCallback function.
-    // Note: called by VideoCodingModule when encoding finished.
-    virtual int32_t SendData(
-        uint8_t /*payloadType*/,
-        const EncodedImage& encoded_image,
-        const RTPFragmentationHeader& /* fragmentationHeader*/,
-        const RTPVideoHeader* rtpTypeHdr) OVERRIDE;
-
-    VideoCodingModule* _vcm;
-    I420VideoFrame* _decodedVideo;
-    EncodedVideoData* _videoEncodedData;
-};
-}  // namespace webrtc
-#endif // WEBRTC_MODULE_UTILITY_VIDEO
-#endif // WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
diff --git a/webrtc/modules/utility/source/video_frames_queue.cc b/webrtc/modules/utility/source/video_frames_queue.cc
deleted file mode 100644
index 9ade8b5..0000000
--- a/webrtc/modules/utility/source/video_frames_queue.cc
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/utility/source/video_frames_queue.h"
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-
-#include <assert.h>
-
-#include "webrtc/common_video/interface/texture_video_frame.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/tick_util.h"
-
-namespace webrtc {
-VideoFramesQueue::VideoFramesQueue()
-    : _renderDelayMs(10)
-{
-}
-
-VideoFramesQueue::~VideoFramesQueue() {
-  for (FrameList::iterator iter = _incomingFrames.begin();
-       iter != _incomingFrames.end(); ++iter) {
-      delete *iter;
-  }
-  for (FrameList::iterator iter = _emptyFrames.begin();
-       iter != _emptyFrames.end(); ++iter) {
-      delete *iter;
-  }
-}
-
-int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
-  if (newFrame.native_handle() != NULL) {
-    _incomingFrames.push_back(newFrame.CloneFrame());
-    return 0;
-  }
-
-  I420VideoFrame* ptrFrameToAdd = NULL;
-  // Try to re-use a VideoFrame. Only allocate new memory if it is necessary.
-  if (!_emptyFrames.empty()) {
-    ptrFrameToAdd = _emptyFrames.front();
-    _emptyFrames.pop_front();
-  }
-  if (!ptrFrameToAdd) {
-    if (_emptyFrames.size() + _incomingFrames.size() >
-        KMaxNumberOfFrames) {
-      LOG(LS_WARNING) << "Too many frames, limit: " << KMaxNumberOfFrames;
-      return -1;
-    }
-    ptrFrameToAdd = new I420VideoFrame();
-  }
-  ptrFrameToAdd->CopyFrame(newFrame);
-  _incomingFrames.push_back(ptrFrameToAdd);
-  return 0;
-}
-
-// Find the most recent frame that has a VideoFrame::RenderTimeMs() that is
-// lower than current time in ms (TickTime::MillisecondTimestamp()).
-// Note _incomingFrames is sorted so that the oldest frame is first.
-// Recycle all frames that are older than the most recent frame.
-I420VideoFrame* VideoFramesQueue::FrameToRecord() {
-  I420VideoFrame* ptrRenderFrame = NULL;
-  for (FrameList::iterator iter = _incomingFrames.begin();
-       iter != _incomingFrames.end(); ++iter) {
-    I420VideoFrame* ptrOldestFrameInList = *iter;
-    if (ptrOldestFrameInList->render_time_ms() <=
-        TickTime::MillisecondTimestamp() + _renderDelayMs) {
-      // List is traversed beginning to end. If ptrRenderFrame is not
-      // NULL it must be the first, and thus oldest, VideoFrame in the
-      // queue. It can be recycled.
-      if (ptrRenderFrame) {
-        ReturnFrame(ptrRenderFrame);
-       _incomingFrames.pop_front();
-      }
-      ptrRenderFrame = ptrOldestFrameInList;
-    } else {
-      // All VideoFrames following this one will be even newer. No match
-      // will be found.
-      break;
-    }
-  }
-  return ptrRenderFrame;
-}
-
-int32_t VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame) {
-  // No need to reuse texture frames because they do not allocate memory.
-  if (ptrOldFrame->native_handle() == NULL) {
-    ptrOldFrame->set_timestamp(0);
-    ptrOldFrame->set_width(0);
-    ptrOldFrame->set_height(0);
-    ptrOldFrame->set_render_time_ms(0);
-    ptrOldFrame->ResetSize();
-    _emptyFrames.push_back(ptrOldFrame);
-  } else {
-    delete ptrOldFrame;
-  }
-  return 0;
-}
-
-int32_t VideoFramesQueue::SetRenderDelay(uint32_t renderDelay) {
-  _renderDelayMs = renderDelay;
-  return 0;
-}
-}  // namespace webrtc
-#endif // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/webrtc/modules/utility/source/video_frames_queue.h b/webrtc/modules/utility/source/video_frames_queue.h
deleted file mode 100644
index afc64d9..0000000
--- a/webrtc/modules/utility/source/video_frames_queue.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
-#define WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
-
-#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-
-#include <list>
-
-#include "webrtc/common_video/interface/i420_video_frame.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VideoFramesQueue {
- public:
-  VideoFramesQueue();
-  ~VideoFramesQueue();
-
-  // Put newFrame (last) in the queue.
-  int32_t AddFrame(const I420VideoFrame& newFrame);
-
-  // Return the most current frame. I.e. the frame with the highest
-  // VideoFrame::RenderTimeMs() that is lower than
-  // TickTime::MillisecondTimestamp().
-  I420VideoFrame* FrameToRecord();
-
-  // Set the render delay estimate to renderDelay ms.
-  int32_t SetRenderDelay(uint32_t renderDelay);
-
- protected:
-  // Make ptrOldFrame available for re-use. I.e. put it in the empty frames
-  // queue.
-  int32_t ReturnFrame(I420VideoFrame* ptrOldFrame);
-
- private:
-  typedef std::list<I420VideoFrame*> FrameList;
-  // Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames.
-  // 300 frames correspond to 10 seconds' worth of frames at 30 fps.
-  enum {KMaxNumberOfFrames = 300};
-
-  // List of VideoFrame pointers. The list is sorted in the order of when the
-  // VideoFrame was inserted into the list. The first VideoFrame in the list
-  // was inserted first.
-  FrameList    _incomingFrames;
-  // A list of frames that are free to be re-used.
-  FrameList    _emptyFrames;
-
-  // Estimated render delay.
-  uint32_t _renderDelayMs;
-};
-}  // namespace webrtc
-#endif // WEBRTC_MODULE_UTILITY_VIDEO
-#endif  // WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
diff --git a/webrtc/modules/utility/utility.gypi b/webrtc/modules/utility/utility.gypi
index c39a18f..46014e8 100644
--- a/webrtc/modules/utility/utility.gypi
+++ b/webrtc/modules/utility/utility.gypi
@@ -37,18 +37,6 @@
         'source/rtp_dump_impl.cc',
         'source/rtp_dump_impl.h',
       ],
-      'conditions': [
-        ['enable_video==1', {
-          'dependencies': [
-            'webrtc_video_coding',
-          ],
-          'sources': [
-            'source/frame_scaler.cc',
-            'source/video_coder.cc',
-            'source/video_frames_queue.cc',
-          ],
-        }],
-      ],
     },
   ], # targets
 }